Usage example of com.alibaba.otter.shared.etl.model.DbBatch from the alibaba/otter project:
class HttpPipeIntegration, method test_rowData.
/**
 * Verifies that a {@link DbBatch} carrying both a RowBatch and a FileBatch
 * survives a round trip through the HTTP pipe ({@code put} then {@code get}):
 * the retrieved batch must be non-null and keep the same Identity.
 */
@Test
public void test_rowData() {
    final Node currentNode = new Node();
    currentNode.setId(1L);
    currentNode.setIp("127.0.0.1");
    currentNode.setParameters(new NodeParameter());
    final Pipeline pipeline = new Pipeline();
    pipeline.getParameters().setRetriever(RetrieverType.ARIA2C);
    // mock the config client so the pipe resolves our node and pipeline
    new NonStrictExpectations() {
        {
            configClientService.currentNode();
            returns(currentNode);
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    // file portion of the batch: one INSERT file event under the tmp dir
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    File localFile = new File(tmp, "httpPipeTest.jpg");
    FileData localFileData = new FileData();
    localFileData.setPath(localFile.getPath());
    localFileData.setEventType(EventType.INSERT);
    localFileData.setLastModifiedTime(new Date().getTime());
    localFileData.setSize(100L);
    localFileData.setTableId(1L);
    fileBatch.getFiles().add(localFileData);
    // row portion of the batch: one INSERT row with a primary key and a column
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    EventData eventData = new EventData();
    eventData.setTableId(1L);
    eventData.setSchemaName("otter");
    eventData.setTableName("test");
    eventData.setEventType(EventType.INSERT);
    eventData.setExecuteTime(100L);
    EventColumn primaryKey = new EventColumn();
    primaryKey.setColumnName("id");
    primaryKey.setColumnType(1);
    primaryKey.setColumnValue("1");
    primaryKey.setKey(true);
    primaryKey.setNull(false);
    eventData.getKeys().add(primaryKey);
    EventColumn column = new EventColumn();
    column.setColumnName("name");
    column.setColumnType(1);
    column.setColumnValue("test");
    column.setKey(false);
    column.setNull(false);
    eventData.getColumns().add(column);
    rowBatch.merge(eventData);
    DbBatch dbBatch = new DbBatch();
    dbBatch.setRowBatch(rowBatch);
    dbBatch.setFileBatch(fileBatch);
    HttpPipeKey key = rowDataHttpPipe.put(dbBatch);
    DbBatch target = rowDataHttpPipe.get(key);
    // FIX: assert non-null BEFORE dereferencing target, and terminate the
    // boolean assertion with .is(true) — a bare want.bool(...) builds the
    // assertion object but never evaluates it (cf. the other tests here).
    want.object(target).notNull();
    want.bool(target.getRowBatch().getIdentity().equals(identity)).is(true);
}
Usage example of com.alibaba.otter.shared.etl.model.DbBatch from the alibaba/otter project:
class MemoryPipeTest, method test_ok.
/**
 * The in-memory pipe must hand back the very same DbBatch instance that
 * was put in (reference equality, no serialization round trip).
 */
@Test
public void test_ok() {
    // build the input batch first: a RowBatch tagged with a fixed identity
    Identity id = new Identity();
    id.setChannelId(100L);
    id.setPipelineId(100L);
    id.setProcessId(100L);
    RowBatch rows = new RowBatch();
    rows.setIdentity(id);
    DbBatch input = new DbBatch();
    input.setRowBatch(rows);

    RowDataMemoryPipe memoryPipe = new RowDataMemoryPipe();
    memoryPipe.setDownloadDir(tmp);
    try {
        memoryPipe.afterPropertiesSet();
    } catch (Exception e) {
        want.fail();
    }

    MemoryPipeKey key = memoryPipe.put(input);
    DbBatch output = memoryPipe.get(key);
    // same object reference is expected from the memory pipe
    want.bool(input == output).is(true);
}
Usage example of com.alibaba.otter.shared.etl.model.DbBatch from the alibaba/otter project:
class RpcPipeTest, method test_ok.
/**
 * Round-trips a DbBatch through the RPC pipe with the remote call mocked
 * out; the retrieved batch must carry an equal Identity.
 */
@Test
public void test_ok() {
    Identity id = new Identity();
    id.setChannelId(100L);
    id.setPipelineId(100L);
    id.setProcessId(100L);
    RowBatch rows = new RowBatch();
    rows.setIdentity(id);
    final DbBatch sent = new DbBatch();
    sent.setRowBatch(rows);

    final RowDataRpcPipe rpcPipe = new RowDataRpcPipe();
    try {
        rpcPipe.afterPropertiesSet();
    } catch (Exception e) {
        want.fail();
    }
    // short-circuit the remote call: route it straight into the pipe's own onGet handler
    Mockit.setUpMock(NodeCommmunicationClient.class, new Object() {

        @Mock
        public Object call(Long nid, final Event event) {
            try {
                return TestUtils.invokeMethod(rpcPipe, "onGet", event);
            } catch (Exception e) {
                want.fail();
            }
            return null;
        }
    });
    // pretend this process is node 1
    Mockit.setUpMock(RowDataRpcPipe.class, new Object() {

        @Mock
        private Long getNid() {
            return 1L;
        }
    });
    rpcPipe.setNodeCommmunicationClient(new NodeCommmunicationClient());

    RpcPipeKey key = rpcPipe.put(sent);
    DbBatch received = rpcPipe.get(key);
    // identities must be equal after the round trip
    want.bool(sent.getRowBatch().getIdentity().equals(received.getRowBatch().getIdentity())).is(true);
}
Usage example of com.alibaba.otter.shared.etl.model.DbBatch from the alibaba/otter project:
class RpcPipeTest, method test_timeout.
/**
 * Verifies the RPC pipe's timeout behavior: a batch put with a 1s timeout
 * must no longer be retrievable after 1.5s — {@code get} returns null.
 */
@Test
public void test_timeout() {
    final DbBatch source = new DbBatch();
    RowBatch rowBatch = new RowBatch();
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    rowBatch.setIdentity(identity);
    source.setRowBatch(rowBatch);
    final RowDataRpcPipe pipe = new RowDataRpcPipe();
    // expire entries after 1 second
    pipe.setTimeout(1 * 1000L);
    try {
        pipe.afterPropertiesSet();
    } catch (Exception e) {
        want.fail();
    }
    // short-circuit the remote call: route it straight into the pipe's own onGet handler
    Mockit.setUpMock(NodeCommmunicationClient.class, new Object() {

        @Mock
        public Object call(Long nid, final Event event) {
            try {
                return TestUtils.invokeMethod(pipe, "onGet", event);
            } catch (Exception e) {
                want.fail();
            }
            return null;
        }
    });
    // pretend this process is node 1
    Mockit.setUpMock(RowDataRpcPipe.class, new Object() {

        @Mock
        private Long getNid() {
            return 1L;
        }
    });
    pipe.setNodeCommmunicationClient(new NodeCommmunicationClient());
    RpcPipeKey key = pipe.put(source);
    // sleep past the timeout so the entry expires
    try {
        Thread.sleep(1500L);
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag before failing, per the
        // InterruptedException handling contract
        Thread.currentThread().interrupt();
        want.fail();
    }
    DbBatch target = pipe.get(key);
    // the expired entry must yield null
    want.bool(target == null).is(true);
}
Usage example of com.alibaba.otter.shared.etl.model.DbBatch from the alibaba/otter project:
class DatabaseExtractorTest, method test_override_field.
/**
 * Exercises the extractor with FIELD sync mode at the pipeline level while
 * each EventData overrides the pipeline's global (BASE) consistency with
 * MEDIA-level consistency; all merged rows must survive extraction.
 */
// FIX: @Test was missing — every other test method in this suite carries it,
// and without the annotation the framework silently skips this test.
@Test
public void test_override_field() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    pipeline.getParameters().setSyncMode(SyncMode.FIELD);
    // pipeline-wide default: global (BASE) consistency
    pipeline.getParameters().setSyncConsistency(SyncConsistency.BASE);
    int start = RandomUtils.nextInt();
    int count = 10;
    List<DataMediaPair> pairs = getDataMediaPairForOracle(start, count);
    pipeline.setPairs(pairs);
    new NonStrictExpectations() {
        {
            configClientService.findPipeline(100L);
            returns(pipeline);
        }
    };
    // build the test data: count tables, count rows each, every row
    // overriding consistency to MEDIA
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    for (int tableId = start; tableId < start + count; tableId++) {
        for (int i = start; i < start + count; i++) {
            EventData eventData = getEventData(tableId, i);
            eventData.setSchemaName("srf");
            eventData.setTableName("columns");
            eventData.setSyncConsistency(SyncConsistency.MEDIA);
            rowBatch.merge(eventData);
        }
    }
    databaseExtractor.extract(new DbBatch(rowBatch));
    want.number(rowBatch.getDatas().size()).isEqualTo(count);
}
Aggregations