Usage example of com.alibaba.otter.shared.etl.model.FileData in the alibaba/otter project.
From class HttpPipeIntegration, method test_rowData:
@Test
public void test_rowData() {
    // End-to-end check of rowDataHttpPipe: put() a DbBatch (row data + file
    // data sharing one Identity), then get() it back and verify the identity
    // survives the round trip.

    // Fake local node so the pipe can resolve its own address.
    final Node currentNode = new Node();
    currentNode.setId(1L);
    currentNode.setIp("127.0.0.1");
    currentNode.setParameters(new NodeParameter());

    // Pipeline configured to retrieve files via aria2c.
    final Pipeline pipeline = new Pipeline();
    pipeline.getParameters().setRetriever(RetrieverType.ARIA2C);

    // Mock the config-service lookups the pipe performs internally.
    new NonStrictExpectations() {
        {
            configClientService.currentNode();
            returns(currentNode);
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };

    // One identity shared by both the row batch and the file batch.
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);

    // File payload: a single INSERT pointing at a local jpg.
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    File localFile = new File(tmp, "httpPipeTest.jpg");
    FileData localFileData = new FileData();
    localFileData.setPath(localFile.getPath());
    localFileData.setEventType(EventType.INSERT);
    localFileData.setLastModifiedTime(new Date().getTime());
    localFileData.setSize(100L);
    localFileData.setTableId(1L);
    fileBatch.getFiles().add(localFileData);

    // Row payload: one INSERT on otter.test with a primary key and one column.
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    EventData eventData = new EventData();
    eventData.setTableId(1L);
    eventData.setSchemaName("otter");
    eventData.setTableName("test");
    eventData.setEventType(EventType.INSERT);
    eventData.setExecuteTime(100L);
    EventColumn primaryKey = new EventColumn();
    primaryKey.setColumnName("id");
    primaryKey.setColumnType(1);
    primaryKey.setColumnValue("1");
    primaryKey.setKey(true);
    primaryKey.setNull(false);
    eventData.getKeys().add(primaryKey);
    EventColumn column = new EventColumn();
    column.setColumnName("name");
    column.setColumnType(1);
    column.setColumnValue("test");
    column.setKey(false);
    column.setNull(false);
    eventData.getColumns().add(column);
    rowBatch.merge(eventData);

    DbBatch dbBatch = new DbBatch();
    dbBatch.setRowBatch(rowBatch);
    dbBatch.setFileBatch(fileBatch);

    HttpPipeKey key = rowDataHttpPipe.put(dbBatch);
    DbBatch target = rowDataHttpPipe.get(key);
    // FIX: assert non-null BEFORE dereferencing target; the original called
    // target.getRowBatch() first, which would NPE instead of failing cleanly.
    want.object(target).notNull();
    want.bool(target.getRowBatch().getIdentity().equals(identity));
}
Usage example of com.alibaba.otter.shared.etl.model.FileData in the alibaba/otter project.
From class OtterTransformerFactory, method transform:
/**
* 转化FileBatch对象
*/
/**
 * Transforms a {@link FileBatch} into a map of per-type {@link BatchObject}
 * results, applying the configured {@code fileDataTransformer} to each file
 * entry for every matching data-media pair.
 *
 * @param fileBatch the source batch of file events to transform
 * @return map keyed by output data class (a FileData entry is always present,
 *         possibly empty)
 */
public Map<Class, BatchObject> transform(FileBatch fileBatch) {
    final Identity identity = translateIdentity(fileBatch.getIdentity());
    List<FileData> fileDatas = fileBatch.getFiles();
    Map<Class, BatchObject> result = new HashMap<Class, BatchObject>();
    // Pre-register an empty FileData batch so callers always find an entry.
    result.put(FileData.class, initBatchObject(identity, FileData.class));
    // FIX: the pipeline lookup depends only on identity, not on the current
    // fileData, so hoist it out of the loop instead of resolving it per file.
    Pipeline pipeline = configClientService.findPipeline(identity.getPipelineId());
    for (FileData fileData : fileDatas) {
        Long tableId = fileData.getTableId();
        // Each source table may be replicated to multiple targets.
        List<DataMediaPair> dataMediaPairs = ConfigHelper.findDataMediaPairByMediaId(pipeline, tableId);
        for (DataMediaPair pair : dataMediaPairs) {
            if (!pair.getSource().getId().equals(tableId)) {
                // Skip pairs where this table is not the source side.
                continue;
            }
            Object item = fileDataTransformer.transform(fileData, new OtterTransformerContext(identity, pair, pipeline));
            if (item == null) {
                continue;
            }
            // Fold the transformed item into the result map.
            merge(identity, result, item);
        }
    }
    return result;
}
Usage example of com.alibaba.otter.shared.etl.model.FileData in the alibaba/otter project.
From class OtterTransformerFactory, method merge:
// =============================== helper method
// ============================
// 将生成的item对象合并到结果对象中
// =============================== helper method
// ============================
// Folds a single transformed item into the per-class result map, lazily
// creating the batch container for the item's class on first encounter.
// Synchronized because several transform workers may share the result map.
private synchronized void merge(Identity identity, Map<Class, BatchObject> data, Object item) {
    Class itemClass = item.getClass();
    BatchObject batch = data.get(itemClass);
    if (batch == null) {
        // First item of this type: create and register its container.
        batch = initBatchObject(identity, itemClass);
        data.put(itemClass, batch);
    }
    // Dispatch on the container type; each supported type returns early.
    if (batch instanceof RowBatch) {
        ((RowBatch) batch).merge((EventData) item);
        return;
    }
    if (batch instanceof FileBatch) {
        ((FileBatch) batch).getFiles().add((FileData) item);
        return;
    }
    throw new TransformException("no support Data[" + itemClass.getName() + "]");
}
Usage example of com.alibaba.otter.shared.etl.model.FileData in the alibaba/otter project.
From class ArchiveBeanTest, method test_simple:
@Test
public void test_simple() {
    // Round-trip test: pack a list of FileData into a zip, unpack it, and
    // verify the entry counts match.
    // NOTE(review): the fixture code that populated `files`/`fileDatas` was
    // commented out upstream, so this currently packs and unpacks an EMPTY
    // list; restore the fixture to make the test meaningful.
    File[] files = new File[10];
    List<FileData> fileDatas = new ArrayList<FileData>();
    File archiveFile = new File(tmp, "pack.zip");
    File unpack = new File(tmp, "unpack");
    ArchiveBean archiveBean = new ArchiveBean();
    try {
        archiveBean.afterPropertiesSet();
    } catch (Exception e1) {
        want.fail();
    }
    try {
        archiveBean.pack(archiveFile, fileDatas, new ArchiveRetriverCallback<FileData>() {
            public InputStream retrive(FileData source) {
                try {
                    return new FileInputStream(new File(source.getPath()));
                } catch (FileNotFoundException e) {
                    // Fail the test outright rather than logging the trace
                    // and silently handing back null content.
                    want.fail();
                }
                return null;
            }
        });
        // Unpack and compare entry counts against the input list.
        List<File> result = archiveBean.unpack(archiveFile, unpack);
        want.bool(result.size() == fileDatas.size());
    } catch (Exception e) {
        want.fail();
    } finally {
        // FIX: `files` is never populated (fixture commented out), so guard
        // against deleting null entries.
        for (File file : files) {
            if (file != null) {
                NioUtils.delete(file);
            }
        }
        NioUtils.delete(archiveFile);
        NioUtils.delete(unpack);
    }
}
Aggregations