Example 6 with PipeException

use of com.alibaba.otter.node.etl.common.pipe.exception.PipeException in project otter by alibaba.

the class AttachmentHttpPipe method unpackFile.

// Handle the corresponding attachment
private File unpackFile(HttpPipeKey key) {
    Pipeline pipeline = configClientService.findPipeline(key.getIdentity().getPipelineId());
    DataRetriever dataRetriever = dataRetrieverFactory.createRetriever(pipeline.getParameters().getRetriever(), key.getUrl(), downloadDir);
    File archiveFile = null;
    try {
        dataRetriever.connect();
        dataRetriever.doRetrieve();
        archiveFile = dataRetriever.getDataAsFile();
    } catch (Exception e) {
        dataRetriever.abort();
        throw new PipeException("download_error", e);
    } finally {
        dataRetriever.disconnect();
    }
    // Decrypt the data if it was encrypted
    if (StringUtils.isNotEmpty(key.getKey()) && StringUtils.isNotEmpty(key.getCrc())) {
        decodeFile(archiveFile, key.getKey(), key.getCrc());
    }
    // Strip the trailing .gzip suffix and use the result as the unpack directory
    String dir = StringUtils.removeEnd(archiveFile.getPath(), FilenameUtils.EXTENSION_SEPARATOR_STR + FilenameUtils.getExtension(archiveFile.getPath()));
    File unpackDir = new File(dir);
    // Start unpacking
    getArchiveBean().unpack(archiveFile, unpackDir);
    return unpackDir;
}
Also used: PipeException (com.alibaba.otter.node.etl.common.pipe.exception.PipeException), DataRetriever (com.alibaba.otter.node.etl.common.io.download.DataRetriever), File (java.io.File), BeansException (org.springframework.beans.BeansException), Pipeline (com.alibaba.otter.shared.common.model.config.pipeline.Pipeline)
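
Both HTTP pipes in these examples follow the same DataRetriever lifecycle: connect, retrieve, fetch the result as a local file, abort on failure, and always disconnect, wrapping any failure in a PipeException with the "download_error" code. A minimal sketch of that pattern pulled out into a standalone helper (the RetrieverTemplate class and its download method are illustrative only, not part of the otter source):

import java.io.File;

import com.alibaba.otter.node.etl.common.io.download.DataRetriever;
import com.alibaba.otter.node.etl.common.pipe.exception.PipeException;

public class RetrieverTemplate {

    // Download the remote data and return it as a local file. The retriever is
    // assumed to be created beforehand by DataRetrieverFactory, as in the examples above.
    public static File download(DataRetriever retriever) {
        try {
            retriever.connect();
            retriever.doRetrieve();
            return retriever.getDataAsFile();
        } catch (Exception e) {
            // abort the transfer before surfacing a pipe-level error
            retriever.abort();
            throw new PipeException("download_error", e);
        } finally {
            // always release the connection, successful or not
            retriever.disconnect();
        }
    }
}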

Example 7 with PipeException

use of com.alibaba.otter.node.etl.common.pipe.exception.PipeException in project otter by alibaba.

the class RowDataHttpPipe method getDbBatch.

// Handle the corresponding dbBatch
private DbBatch getDbBatch(HttpPipeKey key) {
    String dataUrl = key.getUrl();
    Pipeline pipeline = configClientService.findPipeline(key.getIdentity().getPipelineId());
    DataRetriever dataRetriever = dataRetrieverFactory.createRetriever(pipeline.getParameters().getRetriever(), dataUrl, downloadDir);
    File archiveFile = null;
    try {
        dataRetriever.connect();
        dataRetriever.doRetrieve();
        archiveFile = dataRetriever.getDataAsFile();
    } catch (Exception e) {
        dataRetriever.abort();
        throw new PipeException("download_error", e);
    } finally {
        dataRetriever.disconnect();
    }
    // Decrypt the data if it was encrypted
    if (StringUtils.isNotEmpty(key.getKey()) && StringUtils.isNotEmpty(key.getCrc())) {
        decodeFile(archiveFile, key.getKey(), key.getCrc());
    }
    InputStream input = null;
    JSONReader reader = null;
    try {
        input = new BufferedInputStream(new FileInputStream(archiveFile));
        DbBatch dbBatch = new DbBatch();
        byte[] lengthBytes = new byte[4];
        input.read(lengthBytes);
        int length = ByteUtils.bytes2int(lengthBytes);
        BatchProto.RowBatch rowbatchProto = BatchProto.RowBatch.parseFrom(new LimitedInputStream(input, length));
        // Rebuild the original model object
        RowBatch rowBatch = new RowBatch();
        rowBatch.setIdentity(build(rowbatchProto.getIdentity()));
        for (BatchProto.RowData rowDataProto : rowbatchProto.getRowsList()) {
            EventData eventData = new EventData();
            eventData.setPairId(rowDataProto.getPairId());
            eventData.setTableId(rowDataProto.getTableId());
            eventData.setTableName(rowDataProto.getTableName());
            eventData.setSchemaName(rowDataProto.getSchemaName());
            eventData.setEventType(EventType.valuesOf(rowDataProto.getEventType()));
            eventData.setExecuteTime(rowDataProto.getExecuteTime());
            // add by ljh at 2012-10-31
            if (StringUtils.isNotEmpty(rowDataProto.getSyncMode())) {
                eventData.setSyncMode(SyncMode.valuesOf(rowDataProto.getSyncMode()));
            }
            if (StringUtils.isNotEmpty(rowDataProto.getSyncConsistency())) {
                eventData.setSyncConsistency(SyncConsistency.valuesOf(rowDataProto.getSyncConsistency()));
            }
            // Process the primary key columns
            List<EventColumn> keys = new ArrayList<EventColumn>();
            for (BatchProto.Column columnProto : rowDataProto.getKeysList()) {
                keys.add(buildColumn(columnProto));
            }
            eventData.setKeys(keys);
            // Process the old primary key columns
            if (CollectionUtils.isEmpty(rowDataProto.getOldKeysList()) == false) {
                List<EventColumn> oldKeys = new ArrayList<EventColumn>();
                for (BatchProto.Column columnProto : rowDataProto.getOldKeysList()) {
                    oldKeys.add(buildColumn(columnProto));
                }
                eventData.setOldKeys(oldKeys);
            }
            // Process the actual column values
            List<EventColumn> columns = new ArrayList<EventColumn>();
            for (BatchProto.Column columnProto : rowDataProto.getColumnsList()) {
                columns.add(buildColumn(columnProto));
            }
            eventData.setColumns(columns);
            eventData.setRemedy(rowDataProto.getRemedy());
            eventData.setSize(rowDataProto.getSize());
            eventData.setSql(rowDataProto.getSql());
            eventData.setDdlSchemaName(rowDataProto.getDdlSchemaName());
            eventData.setHint(rowDataProto.getHint());
            eventData.setWithoutSchema(rowDataProto.getWithoutSchema());
            // Merge into the overall batch
            rowBatch.merge(eventData);
        }
        dbBatch.setRowBatch(rowBatch);
        input.read(lengthBytes);
        length = ByteUtils.bytes2int(lengthBytes);
        BatchProto.FileBatch filebatchProto = BatchProto.FileBatch.parseFrom(new LimitedInputStream(input, length));
        // Rebuild the original model object
        FileBatch fileBatch = new FileBatch();
        fileBatch.setIdentity(build(filebatchProto.getIdentity()));
        for (BatchProto.FileData fileDataProto : filebatchProto.getFilesList()) {
            FileData fileData = new FileData();
            fileData.setPairId(fileDataProto.getPairId());
            fileData.setTableId(fileDataProto.getTableId());
            fileData.setEventType(EventType.valuesOf(fileDataProto.getEventType()));
            fileData.setLastModifiedTime(fileDataProto.getLastModifiedTime());
            fileData.setNameSpace(fileDataProto.getNamespace());
            fileData.setPath(fileDataProto.getPath());
            fileData.setSize(fileDataProto.getSize());
            // Add to the fileBatch
            fileBatch.getFiles().add(fileData);
        }
        dbBatch.setFileBatch(fileBatch);
        return dbBatch;
    } catch (IOException e) {
        throw new PipeException("deserial_error", e);
    } finally {
        IOUtils.closeQuietly(reader);
        // also close the underlying input stream to avoid leaking the file handle
        IOUtils.closeQuietly(input);
    }
}
Also used: EventColumn (com.alibaba.otter.shared.etl.model.EventColumn), ArrayList (java.util.ArrayList), DbBatch (com.alibaba.otter.shared.etl.model.DbBatch), EventData (com.alibaba.otter.shared.etl.model.EventData), BufferedInputStream (java.io.BufferedInputStream), FileData (com.alibaba.otter.shared.etl.model.FileData), FileBatch (com.alibaba.otter.shared.etl.model.FileBatch), FileInputStream (java.io.FileInputStream), InputStream (java.io.InputStream), DataRetriever (com.alibaba.otter.node.etl.common.io.download.DataRetriever), IOException (java.io.IOException), BatchProto (com.alibaba.otter.node.etl.model.protobuf.BatchProto), PipeException (com.alibaba.otter.node.etl.common.pipe.exception.PipeException), Pipeline (com.alibaba.otter.shared.common.model.config.pipeline.Pipeline), RowBatch (com.alibaba.otter.shared.etl.model.RowBatch), JSONReader (com.alibaba.fastjson.JSONReader), File (java.io.File)
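
The batch file read by getDbBatch is framed as a 4-byte length header followed by a protobuf message, twice in a row: first the RowBatch, then the FileBatch. The length decoded by ByteUtils.bytes2int bounds a LimitedInputStream so each parse stops exactly at the message boundary. A minimal sketch of that read step as a reusable helper (the FramedMessages class, the readFully-style loop, and the generic Parser parameter are illustrative assumptions; the otter code inlines this logic and reads the header with a single input.read call):

import java.io.IOException;
import java.io.InputStream;

import com.google.protobuf.Parser;

public class FramedMessages {

    // Reads one length-prefixed protobuf message: a 4-byte length header, then
    // exactly `length` bytes handed to the parser. ByteUtils and LimitedInputStream
    // are the same otter-internal classes used by getDbBatch above (imports omitted).
    public static <T> T readFramed(InputStream input, Parser<T> parser) throws IOException {
        byte[] lengthBytes = new byte[4];
        int off = 0;
        while (off < lengthBytes.length) { // loop until the whole header is read
            int n = input.read(lengthBytes, off, lengthBytes.length - off);
            if (n < 0) {
                throw new IOException("unexpected end of stream while reading length header");
            }
            off += n;
        }
        int length = ByteUtils.bytes2int(lengthBytes);
        // cap the parser at `length` bytes so the next framed message stays intact
        return parser.parseFrom(new LimitedInputStream(input, length));
    }
}

A call site would then look roughly like readFramed(input, BatchProto.RowBatch.parser()), or the generated PARSER field on older protobuf versions.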

Example 8 with PipeException

use of com.alibaba.otter.node.etl.common.pipe.exception.PipeException in project otter by alibaba.

the class RowDataMemoryPipe method prepareFile.

// Handle the corresponding attachments
@SuppressWarnings("unused")
private File prepareFile(FileBatch fileBatch) {
    // Build the corresponding file url
    String dirname = buildFileName(fileBatch.getIdentity(), ClassUtils.getShortClassName(fileBatch.getClass()));
    File dir = new File(downloadDir, dirname);
    // Create the parent directory
    NioUtils.create(dir, false, 3);
    // Compress the corresponding file data
    List<FileData> fileDatas = fileBatch.getFiles();
    for (FileData fileData : fileDatas) {
        String namespace = fileData.getNameSpace();
        String path = fileData.getPath();
        boolean isLocal = StringUtils.isBlank(namespace);
        String entryName = null;
        if (true == isLocal) {
            entryName = FilenameUtils.getPath(path) + FilenameUtils.getName(path);
        } else {
            entryName = namespace + File.separator + path;
        }
        InputStream input = retrive(fileBatch.getIdentity(), fileData);
        if (input == null) {
            continue;
        }
        File entry = new File(dir, entryName);
        // Try to create the parent path
        NioUtils.create(entry.getParentFile(), false, retry);
        FileOutputStream output = null;
        try {
            output = new FileOutputStream(entry);
            // Write into the output stream
            NioUtils.copy(input, output);
        } catch (Exception e) {
            throw new PipeException("prepareFile error for file[" + entry.getPath() + "]");
        } finally {
            IOUtils.closeQuietly(output);
        }
    }
    return dir;
}
Also used: FileInputStream (java.io.FileInputStream), InputStream (java.io.InputStream), FileOutputStream (java.io.FileOutputStream), PipeException (com.alibaba.otter.node.etl.common.pipe.exception.PipeException), File (java.io.File), FileData (com.alibaba.otter.shared.etl.model.FileData), FileNotFoundException (java.io.FileNotFoundException)
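
prepareFile lays the files out under a per-batch directory: a blank namespace marks a local file, which keeps only its path-derived name, while a non-blank namespace becomes the top-level folder inside the prepared directory. A minimal sketch of just that naming rule (the EntryNames helper is illustrative; the imports assume commons-io and commons-lang 2.x, matching the FilenameUtils/StringUtils calls above):

import java.io.File;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;

public class EntryNames {

    // Mirrors the branch in prepareFile above: a blank namespace means a local file.
    public static String entryName(String namespace, String path) {
        if (StringUtils.isBlank(namespace)) {
            // local file: keep its relative directory plus file name
            return FilenameUtils.getPath(path) + FilenameUtils.getName(path);
        }
        // remote file: group it under its namespace
        return namespace + File.separator + path;
    }
}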

Aggregations

PipeException (com.alibaba.otter.node.etl.common.pipe.exception.PipeException): 8 usages
File (java.io.File): 5 usages
Pipeline (com.alibaba.otter.shared.common.model.config.pipeline.Pipeline): 4 usages
FileData (com.alibaba.otter.shared.etl.model.FileData): 3 usages
EncryptedData (com.alibaba.otter.node.etl.common.io.EncryptedData): 2 usages
DataRetriever (com.alibaba.otter.node.etl.common.io.download.DataRetriever): 2 usages
ChecksumException (com.alibaba.otter.node.etl.common.io.signature.ChecksumException): 2 usages
PipeKey (com.alibaba.otter.node.etl.common.pipe.PipeKey): 2 usages
HttpPipeKey (com.alibaba.otter.node.etl.common.pipe.impl.http.HttpPipeKey): 2 usages
MemoryPipeKey (com.alibaba.otter.node.etl.common.pipe.impl.memory.MemoryPipeKey): 2 usages
RpcPipeKey (com.alibaba.otter.node.etl.common.pipe.impl.rpc.RpcPipeKey): 2 usages
BatchProto (com.alibaba.otter.node.etl.model.protobuf.BatchProto): 2 usages
DbBatch (com.alibaba.otter.shared.etl.model.DbBatch): 2 usages
EventColumn (com.alibaba.otter.shared.etl.model.EventColumn): 2 usages
EventData (com.alibaba.otter.shared.etl.model.EventData): 2 usages
FileBatch (com.alibaba.otter.shared.etl.model.FileBatch): 2 usages
RowBatch (com.alibaba.otter.shared.etl.model.RowBatch): 2 usages
FileInputStream (java.io.FileInputStream): 2 usages
FileOutputStream (java.io.FileOutputStream): 2 usages
IOException (java.io.IOException): 2 usages