Use of com.alibaba.otter.shared.etl.model.Identity in project otter by alibaba, from class OtterTransformerTest, method test_rowData_mysql_oracle.
@Test
public void test_rowData_mysql_oracle() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    List<DataMediaPair> pairs = new ArrayList<DataMediaPair>();
    DataMediaPair pair1 = new DataMediaPair();
    pair1.setId(1L);
    pair1.setPipelineId(pipeline.getId());
    pair1.setPullWeight(1L);
    pair1.setPushWeight(1L);
    DbDataMedia mysqlMedia = getMysqlMedia();
    mysqlMedia.setId(1L);
    pair1.setSource(mysqlMedia);
    DbDataMedia oracleMedia = getOracleMedia();
    pair1.setTarget(oracleMedia);
    pairs.add(pair1);
    pipeline.setPairs(pairs);
    PipelineParameter param = new PipelineParameter();
    param.setSyncMode(SyncMode.ROW);
    pipeline.setParameters(param);
    new NonStrictExpectations() {
        {
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    EventData eventData = new EventData();
    eventData.setTableId(1L);
    eventData.setSchemaName("srf");
    eventData.setTableName("columns");
    eventData.setEventType(EventType.UPDATE);
    eventData.setExecuteTime(100L);
    eventData.getKeys().add(buildColumn("id", Types.INTEGER, "1", true, false));
    eventData.getKeys().add(buildColumn("name", Types.VARCHAR, "ljh", true, false));
    eventData.getColumns().add(buildColumn("alias_name", Types.CHAR, "hello", false, false));
    eventData.getColumns().add(buildColumn("amount", Types.DECIMAL, "100.01", false, false));
    eventData.getColumns().add(buildColumn("text_b", Types.BLOB, "[116,101,120,116,95,98]", false, false));
    eventData.getColumns().add(buildColumn("text_c", Types.CLOB, "text_c", false, false));
    eventData.getColumns().add(buildColumn("curr_date", Types.DATE, "2011-01-01", false, false));
    eventData.getColumns().add(buildColumn("gmt_create", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    eventData.getColumns().add(buildColumn("gmt_modify", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    rowBatch.merge(eventData);
    Map<Class, BatchObject> batchs = otterTransformFactory.transform(rowBatch);
    RowBatch result = (RowBatch) batchs.get(EventData.class);
    want.number(result.getDatas().size()).isEqualTo(1);
}
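The snippet above relies on a buildColumn helper defined elsewhere in OtterTransformerTest. A minimal sketch of what such a helper could look like, assuming otter's EventColumn model exposes setters for the column name, JDBC type, string value, key flag and null flag (the setter names are an assumption inferred from how buildColumn is called above, not copied from the project):

// Hypothetical helper; EventColumn setter names are assumed, not verified against the project.
private EventColumn buildColumn(String name, int type, String value, boolean isKey, boolean isNull) {
    EventColumn column = new EventColumn();
    column.setColumnName(name);
    column.setColumnType(type);   // a java.sql.Types constant, e.g. Types.VARCHAR
    column.setColumnValue(value); // column values travel through EventData as strings
    column.setKey(isKey);
    column.setNull(isNull);
    return column;
}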
Use of com.alibaba.otter.shared.etl.model.Identity in project otter by alibaba, from class OtterTransformerTest, method test_fileData.
@Test
public void test_fileData() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    List<DataMediaPair> pairs = new ArrayList<DataMediaPair>();
    DataMediaPair pair1 = new DataMediaPair();
    pair1.setId(1L);
    pair1.setPipelineId(pipeline.getId());
    pair1.setPullWeight(1L);
    pair1.setPushWeight(1L);
    DbDataMedia oracleMedia = getOracleMedia();
    oracleMedia.setId(1L);
    pair1.setSource(oracleMedia);
    DbDataMedia mysqlMedia = getMysqlMedia();
    pair1.setTarget(mysqlMedia);
    pairs.add(pair1);
    pipeline.setPairs(pairs);
    new NonStrictExpectations() {
        {
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    File localFile = new File("/tmp", "httpPipeTest.jpg");
    FileData localFileData = new FileData();
    localFileData.setTableId(1L);
    localFileData.setPairId(1L);
    localFileData.setPath(localFile.getPath());
    fileBatch.getFiles().add(localFileData);
    Map<Class, BatchObject> batchs = otterTransformFactory.transform(fileBatch);
    FileBatch result = (FileBatch) batchs.get(FileData.class);
    want.number(result.getFiles().size()).isEqualTo(1);
}
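The FileData above only records the path /tmp/httpPipeTest.jpg; the snippet does not show whether that file actually exists on disk. If the surrounding test needs the file to be present (an assumption; the transform step may only care about the path string), a JUnit setup along these lines would create it:

// Sketch of a setup step; the file name matches the path referenced above.
@Before
public void prepareLocalFile() throws IOException {
    File localFile = new File("/tmp", "httpPipeTest.jpg");
    if (!localFile.exists()) {
        FileOutputStream out = new FileOutputStream(localFile);
        out.write("test".getBytes()); // the content is irrelevant, only the path is used
        out.close();
    }
}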
Use of com.alibaba.otter.shared.etl.model.Identity in project otter by alibaba, from class MemoryPipeTest, method test_timeout.
@Test
public void test_timeout() {
    RowDataMemoryPipe pipe = new RowDataMemoryPipe();
    // times out after 1s
    pipe.setTimeout(1 * 1000L);
    pipe.setDownloadDir(tmp);
    try {
        pipe.afterPropertiesSet();
    } catch (Exception e) {
        want.fail();
    }
    DbBatch source = new DbBatch();
    RowBatch rowBatch = new RowBatch();
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    rowBatch.setIdentity(identity);
    source.setRowBatch(rowBatch);
    MemoryPipeKey key = pipe.put(source);
    try {
        Thread.sleep(1500L);
    } catch (InterruptedException e) {
        want.fail();
    }
    DbBatch target = pipe.get(key);
    // the result is null because the entry has expired
    want.bool(target == null).is(true);
}
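For contrast, a sketch of the non-expired path using the same pipe API as the test above: when get is called before the timeout elapses, the stored batch should still be returned (an illustrative counterpart, not a test taken from the project).

@Test
public void test_get_before_timeout() throws Exception {
    RowDataMemoryPipe pipe = new RowDataMemoryPipe();
    pipe.setTimeout(10 * 1000L); // generous timeout so the entry cannot expire during the test
    pipe.setDownloadDir(tmp);
    pipe.afterPropertiesSet();
    DbBatch source = new DbBatch();
    RowBatch rowBatch = new RowBatch();
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    rowBatch.setIdentity(identity);
    source.setRowBatch(rowBatch);
    MemoryPipeKey key = pipe.put(source);
    DbBatch target = pipe.get(key); // fetched immediately, well before the timeout
    want.bool(target != null).is(true);
}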
Use of com.alibaba.otter.shared.etl.model.Identity in project otter by alibaba, from class FileLoadActionTest, method testLoadWithLocal.
@Test
public void testLoadWithLocal() throws IOException {
    // Parameters used to build the fileData: fileDataStartIndex determines how the pipeline
    // maps to the fileData (via the dataMediaPair id), as well as the dataMediaPair's pushWeight.
    final int fileDataStartIndex = 0;
    final int fileDataCount = 50;
    final Pipeline pipeline = buildPipeline(fileDataStartIndex, fileDataCount);
    final Channel channel = new Channel();
    new NonStrictExpectations() {
        {
            configClientService.findChannel(anyLong);
            returns(channel);
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity id = buildIdentity(1L, 2L, 3L);
    FileBatch fileBatch = buildFileBatch(id);
    fileBatch.getFiles().addAll(buildFileDatas(null, EventType.INSERT, fileDataStartIndex, fileDataCount, true));
    WeightController controller = new WeightController(1);
    FileLoadContext context = fileLoadAction.load(fileBatch, ROOT_DIR, controller);
    want.object(context.getChannel()).isEqualTo(channel);
    want.object(context.getPipeline()).isEqualTo(pipeline);
    want.object(context.getPrepareDatas()).isEqualTo(fileBatch.getFiles());
    want.number(context.getProcessedDatas().size()).isEqualTo(fileBatch.getFiles().size());
}
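buildIdentity and buildFileBatch are helpers of FileLoadActionTest that are not shown in this snippet. Given how they are called above, plausible sketches are (the helper bodies are assumptions; only the Identity and FileBatch setters appear in the snippets on this page):

// Hypothetical helpers matching the calls buildIdentity(1L, 2L, 3L) and buildFileBatch(id).
private Identity buildIdentity(long channelId, long pipelineId, long processId) {
    Identity identity = new Identity();
    identity.setChannelId(channelId);
    identity.setPipelineId(pipelineId);
    identity.setProcessId(processId);
    return identity;
}

private FileBatch buildFileBatch(Identity identity) {
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    return fileBatch;
}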
Use of com.alibaba.otter.shared.etl.model.Identity in project otter by alibaba, from class OtterLoaderFactoryIntegration, method test_simple.
@Test
public void test_simple() {
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    final DbBatch dbBatch = new DbBatch();
    dbBatch.setRowBatch(rowBatch);
    dbBatch.setFileBatch(fileBatch);
    final CountDownLatch latch = new CountDownLatch(1);
    executorService.submit(new Runnable() {

        public void run() {
            System.out.println("first run!!!!!!");
            otterLoaderFactory.load(dbBatch);
            latch.countDown();
        }
    });
    try {
        latch.await();
    } catch (InterruptedException e) {
    }
}
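The unbounded latch.await() combined with the empty catch block can hang the test indefinitely if otterLoaderFactory.load never completes. A hedged variant of the wait that bounds the time and fails explicitly (assuming java.util.concurrent.TimeUnit is imported and reusing the want helper from the other tests):

try {
    // wait at most 60 seconds for the load to finish instead of blocking forever
    boolean finished = latch.await(60, TimeUnit.SECONDS);
    want.bool(finished).is(true);
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    want.fail();
}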