
Example 11 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in the symmetric-ds project by JumpMind.

From class DataExtractorService, method isRetry:

protected boolean isRetry(OutgoingBatch currentBatch, Node remoteNode) {
    boolean offline = parameterService.is(ParameterConstants.NODE_OFFLINE, false);
    IStagedResource previouslyExtracted = getStagedResource(currentBatch);
    boolean cclient = StringUtils.equals(remoteNode.getDeploymentType(), Constants.DEPLOYMENT_TYPE_CCLIENT);
    return !offline && previouslyExtracted != null && previouslyExtracted.exists()
            && previouslyExtracted.getState() != State.CREATE
            && currentBatch.getStatus() != OutgoingBatch.Status.RS
            && currentBatch.getSentCount() > 0
            && remoteNode.isVersionGreaterThanOrEqualTo(3, 8, 0)
            && !cclient;
}
Also used: IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource)
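
The check above only consults state that is already available on the batch and its staged resource: the staged copy must exist and have left the CREATE state, the batch must have been sent at least once, and the remote node must be at version 3.8.0 or newer and not the C client. A minimal caller-side sketch of how such a decision might be used follows; the dispatch(), resendFromStage() and reExtract() helpers are hypothetical stand-ins written for this sketch, not part of the SymmetricDS API.

// Hypothetical caller: illustrates the two outcomes of an isRetry()-style check.
// Only OutgoingBatch and Node are real SymmetricDS types; everything else here
// is a stand-in written for this sketch.
import org.jumpmind.symmetric.model.Node;
import org.jumpmind.symmetric.model.OutgoingBatch;

public class RetryDecisionSketch {

    /** Narrow view of the extractor so the sketch compiles without the full service. */
    interface RetryCheck {
        boolean isRetry(OutgoingBatch batch, Node remoteNode);
    }

    void dispatch(OutgoingBatch batch, Node remoteNode, RetryCheck extractor) {
        if (extractor.isRetry(batch, remoteNode)) {
            // A usable staged copy exists and was sent before: replay it from staging.
            resendFromStage(batch);
        } else {
            // Offline mode, never sent, still in CREATE state, old client, ...:
            // extract the batch again from the source database.
            reExtract(batch);
        }
    }

    void resendFromStage(OutgoingBatch batch) { /* hypothetical */ }

    void reExtract(OutgoingBatch batch) { /* hypothetical */ }
}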

Example 12 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in the symmetric-ds project by JumpMind.

From class MultiBatchStagingWriter, method nextBatch:

protected void nextBatch() {
    if (this.outgoingBatch != null) {
        this.finishedBatches.add(outgoingBatch);
        rowCount += this.outgoingBatch.getDataEventCount();
        byteCount += this.outgoingBatch.getByteCount();
    }
    this.outgoingBatch = this.batches.remove(0);
    this.outgoingBatch.setDataEventCount(0);
    this.outgoingBatch.setInsertEventCount(0);
    if (this.finishedBatches.size() > 0) {
        this.outgoingBatch.setExtractCount(this.outgoingBatch.getExtractCount() + 1);
    }
    /*
     * Update the last update time so the batch
     * isn't purged prematurely
     */
    for (OutgoingBatch batch : finishedBatches) {
        IStagedResource resource = this.dataExtractorService.getStagedResource(batch);
        if (resource != null) {
            resource.refreshLastUpdateTime();
        }
    }
}
Also used: IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch)
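
Calling refreshLastUpdateTime() on the finished batches keeps their staging files alive while the multi-batch extract is still running: staging cleanup is typically driven by how long a resource has been idle relative to a time-to-live, so refreshing the timestamp prevents a long-running extract from losing batches it has already written. A standalone sketch of such an idle-time check follows; it illustrates the pattern only, it is not the actual staging manager purge logic, and the lastUpdateTimeMs value is assumed to be supplied by the caller.

// Standalone sketch of a TTL-style staleness check, illustrating why
// refreshLastUpdateTime() matters. This is NOT the real staging purge code.
public class StagingTtlSketch {

    /** True when the resource has been idle longer than the configured time-to-live. */
    static boolean isExpired(long lastUpdateTimeMs, long timeToLiveMs) {
        long idleMs = System.currentTimeMillis() - lastUpdateTimeMs;
        return idleMs > timeToLiveMs;
    }
}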

Example 13 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in the symmetric-ds project by JumpMind.

From class StagingDataWriter, method endBatch:

@Override
protected void endBatch(Batch batch) {
    IStagedResource resource = getStagedResource(batch);
    resource.close();
    resource.setState(State.READY);
    flushNodeId = true;
    processedTables.clear();
    table = null;
}
Also used: IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource)
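
endBatch() closes the staged file and flips it from CREATE to READY, which is exactly the transition the isRetry() check in Example 11 relies on (getState() != State.CREATE). A small sketch of a guard that only accepts fully written staging files follows; it uses only the IStagedResource methods that appear on this page and assumes State is the nested enum referenced as State.CREATE and State.READY above.

// Sketch of a guard that only accepts fully written staging files.
// exists(), getState() and the State enum all appear in the snippets above;
// nothing else is assumed about the staging API.
import org.jumpmind.symmetric.io.stage.IStagedResource;
import org.jumpmind.symmetric.io.stage.IStagedResource.State;

public class ReadyResourceSketch {

    /** A staged resource is consumable once it exists and has been marked READY. */
    static boolean isConsumable(IStagedResource resource) {
        return resource != null
                && resource.exists()
                && resource.getState() == State.READY;
    }
}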

Example 14 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in the symmetric-ds project by JumpMind.

From class DataExtractorService, method extract:

protected List<OutgoingBatch> extract(ProcessInfo processInfo, Node targetNode, List<OutgoingBatch> activeBatches, IDataWriter dataWriter, ExtractMode mode) {
    boolean streamToFileEnabled = parameterService.is(ParameterConstants.STREAM_TO_FILE_ENABLED);
    List<OutgoingBatch> processedBatches = new ArrayList<OutgoingBatch>(activeBatches.size());
    if (activeBatches.size() > 0) {
        Set<String> channelsProcessed = new HashSet<String>();
        long batchesSelectedAtMs = System.currentTimeMillis();
        OutgoingBatch currentBatch = null;
        try {
            long bytesSentCount = 0;
            int batchesSentCount = 0;
            long maxBytesToSync = parameterService.getLong(ParameterConstants.TRANSPORT_MAX_BYTES_TO_SYNC);
            for (int i = 0; i < activeBatches.size(); i++) {
                currentBatch = activeBatches.get(i);
                channelsProcessed.add(currentBatch.getChannelId());
                processInfo.setDataCount(currentBatch.getDataEventCount());
                processInfo.setCurrentBatchId(currentBatch.getBatchId());
                processInfo.setCurrentLoadId(currentBatch.getLoadId());
                currentBatch = requeryIfEnoughTimeHasPassed(batchesSelectedAtMs, currentBatch);
                if (currentBatch.isExtractJobFlag() && currentBatch.getStatus() != Status.IG) {
                    if (parameterService.is(ParameterConstants.INITIAL_LOAD_USE_EXTRACT_JOB)) {
                        if (currentBatch.getStatus() != Status.RQ && currentBatch.getStatus() != Status.IG && !isPreviouslyExtracted(currentBatch)) {
                            /*
                             * The batch must have been purged. It needs to
                             * be re-extracted.
                             */
                            log.info("Batch {} is marked as ready but it has been deleted.  Rescheduling it for extraction", currentBatch.getNodeBatchId());
                            if (changeBatchStatus(Status.RQ, currentBatch, mode)) {
                                resetExtractRequest(currentBatch);
                            }
                            break;
                        } else if (currentBatch.getStatus() == Status.RQ) {
                            log.info("Batch {} is not ready for delivery.  It is currently scheduled for extraction", currentBatch.getNodeBatchId());
                            break;
                        }
                    } else {
                        currentBatch.setStatus(Status.NE);
                        currentBatch.setExtractJobFlag(false);
                    }
                } else {
                    processInfo.setStatus(ProcessInfo.Status.EXTRACTING);
                    currentBatch = extractOutgoingBatch(processInfo, targetNode, dataWriter, currentBatch, streamToFileEnabled, true, mode);
                }
                if (streamToFileEnabled || mode == ExtractMode.FOR_PAYLOAD_CLIENT) {
                    processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
                    currentBatch = sendOutgoingBatch(processInfo, targetNode, currentBatch, dataWriter, mode);
                }
                processedBatches.add(currentBatch);
                if (currentBatch.getStatus() != Status.OK) {
                    currentBatch.setLoadCount(currentBatch.getLoadCount() + 1);
                    changeBatchStatus(Status.LD, currentBatch, mode);
                    bytesSentCount += currentBatch.getByteCount();
                    batchesSentCount++;
                    if (bytesSentCount >= maxBytesToSync && processedBatches.size() < activeBatches.size()) {
                        log.info("Reached the total byte threshold after {} of {} batches were extracted for node '{}'.  The remaining batches will be extracted on a subsequent sync", new Object[] { batchesSentCount, activeBatches.size(), targetNode.getNodeId() });
                        break;
                    }
                }
            }
        } catch (RuntimeException e) {
            SQLException se = unwrapSqlException(e);
            if (currentBatch != null) {
                /* Reread batch in case the ignore flag has been set */
                currentBatch = outgoingBatchService.findOutgoingBatch(currentBatch.getBatchId(), currentBatch.getNodeId());
                statisticManager.incrementDataExtractedErrors(currentBatch.getChannelId(), 1);
                if (se != null) {
                    currentBatch.setSqlState(se.getSQLState());
                    currentBatch.setSqlCode(se.getErrorCode());
                    currentBatch.setSqlMessage(se.getMessage());
                } else {
                    currentBatch.setSqlMessage(getRootMessage(e));
                }
                currentBatch.revertStatsOnError();
                if (currentBatch.getStatus() != Status.IG && currentBatch.getStatus() != Status.OK) {
                    currentBatch.setStatus(Status.ER);
                    currentBatch.setErrorFlag(true);
                }
                outgoingBatchService.updateOutgoingBatch(currentBatch);
                if (isStreamClosedByClient(e)) {
                    log.warn("Failed to transport batch {}.  The stream was closed by the client.  There is a good chance that a previously sent batch errored out and the stream was closed or there was a network error.  The error was: {}", currentBatch, getRootMessage(e));
                } else {
                    if (e instanceof ProtocolException) {
                        IStagedResource resource = getStagedResource(currentBatch);
                        if (resource != null) {
                            resource.delete();
                        }
                    }
                    log.error("Failed to extract batch {}", currentBatch, e);
                }
                processInfo.setStatus(ProcessInfo.Status.ERROR);
            } else {
                log.error("Could not log the outgoing batch status because the batch was null", e);
            }
        }
        // Next, we update the node channel controls to the
        // current timestamp
        Calendar now = Calendar.getInstance();
        for (String channelProcessed : channelsProcessed) {
            NodeChannel nodeChannel = configurationService.getNodeChannel(channelProcessed, targetNode.getNodeId(), false);
            if (nodeChannel != null) {
                nodeChannel.setLastExtractTime(now.getTime());
                configurationService.updateLastExtractTime(nodeChannel);
            }
        }
        return processedBatches;
    } else {
        return Collections.emptyList();
    }
}
Also used: ProtocolException (org.jumpmind.symmetric.io.data.ProtocolException), SQLException (java.sql.SQLException), Calendar (java.util.Calendar), ArrayList (java.util.ArrayList), TransformPoint (org.jumpmind.symmetric.io.data.transform.TransformPoint), IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), NodeChannel (org.jumpmind.symmetric.model.NodeChannel), HashSet (java.util.HashSet)
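
Besides re-queuing purged extract-job batches, the loop above enforces a per-sync byte budget: it accumulates getByteCount() for each transferred batch and breaks out once TRANSPORT_MAX_BYTES_TO_SYNC is reached while batches remain, deferring the rest to the next sync. A detached sketch of that throttling pattern follows; it works on plain byte counts instead of OutgoingBatch objects and is only meant to illustrate the early-break logic, not to mirror the full method.

// Standalone sketch of the byte-budget throttling used in the extract() loop above.
// Plain byte counts stand in for OutgoingBatch objects; only the pattern is shown.
import java.util.List;

public class ByteBudgetSketch {

    /**
     * Returns how many batches (taken in order) are processed under the byte budget,
     * mirroring the early break in extract(): stop once the cumulative size reaches
     * maxBytesToSync while batches are still left over for a subsequent sync.
     */
    static int countWithinBudget(List<Long> batchByteCounts, long maxBytesToSync) {
        long bytesSent = 0;
        int processed = 0;
        for (long byteCount : batchByteCounts) {
            processed++;
            bytesSent += byteCount;
            if (bytesSent >= maxBytesToSync && processed < batchByteCounts.size()) {
                break;
            }
        }
        return processed;
    }
}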

Example 15 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in the symmetric-ds project by JumpMind.

From class DataExtractorService, method sendOutgoingBatch:

protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch, IDataWriter dataWriter, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        changeBatchStatus(Status.SE, currentBatch, mode);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(), extractedBatch);
            DataContext ctx = new DataContext();
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
            new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage").process(ctx);
            if (dataWriter.getStatistics().size() > 0) {
                Statistics stats = dataWriter.getStatistics().values().iterator().next();
                statisticManager.incrementDataSent(currentBatch.getChannelId(), stats.get(DataWriterStatisticConstants.STATEMENTCOUNT));
                long byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
            } else {
                log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s", currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
Also used: IDataReader (org.jumpmind.symmetric.io.data.IDataReader), DataContext (org.jumpmind.symmetric.io.data.DataContext), IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource), ProtocolDataReader (org.jumpmind.symmetric.io.data.reader.ProtocolDataReader), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), Statistics (org.jumpmind.util.Statistics), ProcessInfoDataWriter (org.jumpmind.symmetric.model.ProcessInfoDataWriter)
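
sendOutgoingBatch() replays the staged payload by wiring a ProtocolDataReader over the staged resource into a DataProcessor that pushes rows through the supplied writer. A minimal sketch of that reader-to-writer pipeline follows; the classes used are the ones visible in the snippet above, BatchType is assumed to be the enum referenced there, and the nodeId, staged resource, and writer are assumed to be provided by the caller. The context entries, statistics handling, and ProcessInfoDataWriter wrapping from the real method are deliberately omitted.

// Sketch of the read-from-stage pipeline used in sendOutgoingBatch():
// ProtocolDataReader reads the staged payload and DataProcessor pumps it
// into the supplied IDataWriter. The constructor and process() call match
// the usage shown in the snippet above; everything else is simplified.
import org.jumpmind.symmetric.io.data.Batch.BatchType;
import org.jumpmind.symmetric.io.data.DataContext;
import org.jumpmind.symmetric.io.data.DataProcessor;
import org.jumpmind.symmetric.io.data.IDataReader;
import org.jumpmind.symmetric.io.data.IDataWriter;
import org.jumpmind.symmetric.io.data.reader.ProtocolDataReader;
import org.jumpmind.symmetric.io.stage.IStagedResource;

public class SendFromStageSketch {

    static void sendFromStage(String nodeId, IStagedResource staged, IDataWriter writer) {
        IDataReader reader = new ProtocolDataReader(BatchType.EXTRACT, nodeId, staged);
        DataContext ctx = new DataContext();
        // Context entries such as DATA_CONTEXT_TARGET_NODE are set by the real
        // service (see the snippet above) and are omitted in this sketch.
        new DataProcessor(reader, writer, "send from stage (sketch)").process(ctx);
    }
}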

Aggregations

IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 17 usages
OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch): 8 usages
IOException (java.io.IOException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
IoException (org.jumpmind.exception.IoException): 4 usages
DataContext (org.jumpmind.symmetric.io.data.DataContext): 4 usages
Node (org.jumpmind.symmetric.model.Node): 4 usages
SQLException (java.sql.SQLException): 3 usages
CancellationException (java.util.concurrent.CancellationException): 3 usages
Batch (org.jumpmind.symmetric.io.data.Batch): 3 usages
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 3 usages
IDataReader (org.jumpmind.symmetric.io.data.IDataReader): 3 usages
ProtocolException (org.jumpmind.symmetric.io.data.ProtocolException): 3 usages
IStagingManager (org.jumpmind.symmetric.io.stage.IStagingManager): 3 usages
Channel (org.jumpmind.symmetric.model.Channel): 3 usages
NodeChannel (org.jumpmind.symmetric.model.NodeChannel): 3 usages
ProcessInfoDataWriter (org.jumpmind.symmetric.model.ProcessInfoDataWriter): 3 usages
Statistics (org.jumpmind.util.Statistics): 3 usages
BufferedWriter (java.io.BufferedWriter): 2 usages
Calendar (java.util.Calendar): 2 usages