Search in sources :

Example 1 with FileSyncZipDataWriter

Use of org.jumpmind.symmetric.file.FileSyncZipDataWriter in the symmetric-ds project by JumpMind.

In class FileSyncExtractorService, method buildMultiBatchStagingWriter:

@Override
protected MultiBatchStagingWriter buildMultiBatchStagingWriter(ExtractRequest request, final Node sourceNode, final Node targetNode, List<OutgoingBatch> batches, ProcessInfo processInfo, Channel channel) {
    MultiBatchStagingWriter multiBatchStagingWriter = new MultiBatchStagingWriter(this, request, sourceNode.getNodeId(), stagingManager, batches, channel.getMaxBatchSize(), processInfo) {

        @Override
        protected IDataWriter buildWriter(long memoryThresholdInBytes) {
            IStagedResource stagedResource = stagingManager.create(fileSyncService.getStagingPathComponents(outgoingBatch));
            log.info("Extracting file sync batch {} to resource '{}'", outgoingBatch.getNodeBatchId(), stagedResource);
            long maxBytesToSync = parameterService.getLong(ParameterConstants.TRANSPORT_MAX_BYTES_TO_SYNC);
            FileSyncZipDataWriter fileSyncWriter = new FileSyncZipDataWriter(maxBytesToSync, fileSyncService, nodeService, stagedResource) {

                @Override
                public void close() {
                    super.finish();
                }
            };
            return fileSyncWriter;
        }
    };
    return multiBatchStagingWriter;
}
Also used : FileSyncZipDataWriter(org.jumpmind.symmetric.file.FileSyncZipDataWriter) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource)

Example 2 with FileSyncZipDataWriter

Use of org.jumpmind.symmetric.file.FileSyncZipDataWriter in the symmetric-ds project by JumpMind.

In class FileSyncService, method sendFiles:

public List<OutgoingBatch> sendFiles(ProcessInfo processInfo, Node targetNode, IOutgoingTransport outgoingTransport) {
    List<OutgoingBatch> batchesToProcess = getBatchesToProcess(targetNode);
    if (batchesToProcess.isEmpty()) {
        return batchesToProcess;
    }
    IStagingManager stagingManager = engine.getStagingManager();
    long maxBytesToSync = parameterService.getLong(ParameterConstants.TRANSPORT_MAX_BYTES_TO_SYNC);
    List<OutgoingBatch> processedBatches = new ArrayList<OutgoingBatch>();
    OutgoingBatch currentBatch = null;
    IStagedResource stagedResource = null;
    IStagedResource previouslyStagedResource = null;
    FileSyncZipDataWriter dataWriter = null;
    try {
        long syncedBytes = 0;
        try {
            for (int i = 0; i < batchesToProcess.size(); i++) {
                currentBatch = batchesToProcess.get(i);
                previouslyStagedResource = getStagedResource(currentBatch);
                if (isWaitForExtractionRequired(currentBatch, previouslyStagedResource) || isFlushBatchesRequired(currentBatch, processedBatches, previouslyStagedResource)) {
                    // previously staged batch will have to wait for the next push/pull.
                    break;
                }
                if (previouslyStagedResource != null) {
                    log.debug("Using existing extraction for file sync batch {}", currentBatch.getNodeBatchId());
                    stagedResource = previouslyStagedResource;
                } else {
                    if (dataWriter == null) {
                        stagedResource = stagingManager.create(Constants.STAGING_CATEGORY_OUTGOING, processInfo.getSourceNodeId(), targetNode.getNodeId(), "filesync.zip");
                        dataWriter = new FileSyncZipDataWriter(maxBytesToSync, this, engine.getNodeService(), stagedResource);
                    }
                    log.debug("Extracting batch {} for filesync.", currentBatch.getNodeBatchId());
                    ((DataExtractorService) engine.getDataExtractorService()).extractOutgoingBatch(processInfo, targetNode, dataWriter, currentBatch, false, true, DataExtractorService.ExtractMode.FOR_SYM_CLIENT);
                }
                processedBatches.add(currentBatch);
                syncedBytes += stagedResource.getSize();
                processInfo.incrementBatchCount();
                processInfo.setCurrentBatchId(currentBatch.getBatchId());
                log.debug("Processed file sync batch {}. syncedBytes={}, maxBytesToSync={}", currentBatch, syncedBytes, maxBytesToSync);
                /*
                     * check to see if max bytes to sync has been reached and
                     * stop processing batches
                     */
                if (previouslyStagedResource != null || dataWriter.readyToSend()) {
                    break;
                }
            }
        } finally {
            if (dataWriter != null) {
                dataWriter.finish();
            }
        }
        processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
        for (OutgoingBatch outgoingBatch : processedBatches) {
            outgoingBatch.setStatus(Status.SE);
        }
        engine.getOutgoingBatchService().updateOutgoingBatches(processedBatches);
        try {
            if (stagedResource != null && stagedResource.exists()) {
                InputStream is = stagedResource.getInputStream();
                try {
                    OutputStream os = outgoingTransport.openStream();
                    IOUtils.copy(is, os);
                    os.flush();
                } catch (IOException e) {
                    throw new IoException(e);
                }
            }
            for (int i = 0; i < batchesToProcess.size(); i++) {
                batchesToProcess.get(i).setStatus(Status.LD);
            }
            engine.getOutgoingBatchService().updateOutgoingBatches(batchesToProcess);
        } finally {
            if (stagedResource != null) {
                stagedResource.close();
            }
        }
    } catch (RuntimeException e) {
        if (stagedResource == previouslyStagedResource) {
            // on error, don't let the load extract be deleted.
            stagedResource = null;
        }
        if (currentBatch != null) {
            engine.getStatisticManager().incrementDataExtractedErrors(currentBatch.getChannelId(), 1);
            currentBatch.setSqlMessage(getRootMessage(e));
            currentBatch.revertStatsOnError();
            if (currentBatch.getStatus() != Status.IG) {
                currentBatch.setStatus(Status.ER);
            }
            currentBatch.setErrorFlag(true);
            engine.getOutgoingBatchService().updateOutgoingBatch(currentBatch);
            if (isStreamClosedByClient(e)) {
                log.warn("Failed to extract file sync batch {}.  The stream was closed by the client.  The error was: {}", currentBatch, getRootMessage(e));
            } else {
                log.error("Failed to extract file sync batch " + currentBatch, e);
            }
        } else {
            log.error("Could not log the outgoing batch status because the batch was null", e);
        }
        throw e;
    } finally {
        if (stagedResource != null) {
            stagedResource.delete();
        }
    }
    return processedBatches;
}
Also used : InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) IOException(java.io.IOException) FileSyncZipDataWriter(org.jumpmind.symmetric.file.FileSyncZipDataWriter) IStagingManager(org.jumpmind.symmetric.io.stage.IStagingManager) IoException(org.jumpmind.exception.IoException) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch)

Aggregations

FileSyncZipDataWriter (org.jumpmind.symmetric.file.FileSyncZipDataWriter)2 IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource)2 IOException (java.io.IOException)1 InputStream (java.io.InputStream)1 OutputStream (java.io.OutputStream)1 ArrayList (java.util.ArrayList)1 IoException (org.jumpmind.exception.IoException)1 IStagingManager (org.jumpmind.symmetric.io.stage.IStagingManager)1 OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch)1