Example 1 with ProcessInfoDataWriter

Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in the symmetric-ds project by JumpMind.

From the class DataExtractorService, the method wrapWithTransformWriter:

protected IDataWriter wrapWithTransformWriter(Node sourceNode, Node targetNode, ProcessInfo processInfo, IDataWriter dataWriter, boolean useStagingDataWriter) {
    TransformWriter transformExtractWriter = null;
    if (useStagingDataWriter) {
        // Stage the extracted data (to memory under the threshold, otherwise to disk) and
        // wrap the staging writer so progress is reported through the ProcessInfo.
        long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes, true, nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager), processInfo));
    } else {
        // Write straight to the supplied data writer, still wrapped for progress tracking.
        transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(dataWriter, processInfo));
    }
    }
    return transformExtractWriter;
}
Also used : StagingDataWriter(org.jumpmind.symmetric.io.data.writer.StagingDataWriter) TransformWriter(org.jumpmind.symmetric.io.data.writer.TransformWriter) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter)
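
The method builds a decorator chain: the innermost writer (the supplied IDataWriter, or a StagingDataWriter when staging is enabled) is wrapped in a ProcessInfoDataWriter so progress can be tracked, and the result is wrapped again by the TransformWriter that createTransformDataWriter returns. Below is a minimal, self-contained sketch of that nesting idea; Sink, ConsoleSink, ProgressSink, and NestingSketch are hypothetical stand-ins, not SymmetricDS types.

// Hypothetical stand-in types used only to illustrate the nesting; not SymmetricDS classes.
interface Sink {
    void write(String row);
    void close();
}

// Innermost sink, the real destination (analogous to the IDataWriter or StagingDataWriter chosen above).
class ConsoleSink implements Sink {
    public void write(String row) { System.out.println(row); }
    public void close() { }
}

// Decorator that records progress as rows pass through, the role ProcessInfoDataWriter plays around its nested writer.
class ProgressSink implements Sink {
    private final Sink delegate;
    private long count;
    ProgressSink(Sink delegate) { this.delegate = delegate; }
    public void write(String row) { count++; delegate.write(row); }
    public void close() { System.out.println("rows written: " + count); delegate.close(); }
}

public class NestingSketch {
    public static void main(String[] args) {
        // The outer wrapper sees every row; the inner sink does the real work.
        Sink sink = new ProgressSink(new ConsoleSink());
        sink.write("row-1");
        sink.write("row-2");
        sink.close();
    }
}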

Example 2 with ProcessInfoDataWriter

Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in the symmetric-ds project by JumpMind.

From the class DataExtractorService, the method sendOutgoingBatch:

protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch, boolean isRetry, IDataWriter dataWriter, BufferedWriter writer, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            if (mode == ExtractMode.FOR_SYM_CLIENT && writer != null) {
                // When the target engine runs in the same JVM, copy the staged file directly
                // into its incoming staging instead of streaming the batch over the wire.
                if (!isRetry && parameterService.is(ParameterConstants.OUTGOING_BATCH_COPY_TO_INCOMING_STAGING) && !parameterService.is(ParameterConstants.NODE_OFFLINE, false)) {
                    ISymmetricEngine targetEngine = AbstractSymmetricEngine.findEngineByUrl(targetNode.getSyncUrl());
                    if (targetEngine != null && extractedBatch.isFileResource()) {
                        try {
                            Node sourceNode = nodeService.findIdentity();
                            IStagedResource targetResource = targetEngine.getStagingManager().create(Constants.STAGING_CATEGORY_INCOMING, Batch.getStagedLocation(false, sourceNode.getNodeId()), currentBatch.getBatchId());
                            SymmetricUtils.copyFile(extractedBatch.getFile(), targetResource.getFile());
                            targetResource.setState(State.DONE);
                            isRetry = true;
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                Channel channel = configurationService.getChannel(currentBatch.getChannelId());
                DataContext ctx = new DataContext();
                transferFromStaging(mode, BatchType.EXTRACT, currentBatch, isRetry, extractedBatch, writer, ctx, channel.getMaxKBytesPerSecond());
            } else {
                IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(), extractedBatch);
                DataContext ctx = new DataContext();
                ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
                new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage").process(ctx);
                if (dataReader.getStatistics().size() > 0) {
                    Statistics stats = dataReader.getStatistics().values().iterator().next();
                    statisticManager.incrementDataSent(currentBatch.getChannelId(), stats.get(DataReaderStatistics.READ_RECORD_COUNT));
                    long byteCount = stats.get(DataReaderStatistics.READ_BYTE_COUNT);
                    statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
                } else {
                    log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
                }
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s", currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
Also used : IDataReader(org.jumpmind.symmetric.io.data.IDataReader) Node(org.jumpmind.symmetric.model.Node) NodeChannel(org.jumpmind.symmetric.model.NodeChannel) Channel(org.jumpmind.symmetric.model.Channel) ISymmetricEngine(org.jumpmind.symmetric.ISymmetricEngine) DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor) Statistics(org.jumpmind.util.Statistics) DataReaderStatistics(org.jumpmind.symmetric.io.data.reader.DataReaderStatistics) CancellationException(java.util.concurrent.CancellationException) SymmetricException(org.jumpmind.symmetric.SymmetricException) SQLException(java.sql.SQLException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) ProtocolException(org.jumpmind.symmetric.io.data.ProtocolException) TimeoutException(java.util.concurrent.TimeoutException) IoException(org.jumpmind.exception.IoException) DataContext(org.jumpmind.symmetric.io.data.DataContext) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) ProtocolDataReader(org.jumpmind.symmetric.io.data.reader.ProtocolDataReader) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter)
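
When the batch is not streamed directly to the client, the method reads the staged batch back with a ProtocolDataReader and pumps it through a DataProcessor into a ProcessInfoDataWriter, then records record and byte counts from the reader statistics. The sketch below illustrates that pump-and-count shape under simplified, hypothetical Reader and Writer types; SendFromStageSketch is not a SymmetricDS class.

import java.util.List;

// Minimal sketch of the read-then-write pump that "send from stage" performs.
// Reader and Writer are simplified stand-ins, not the SymmetricDS IDataReader/IDataWriter interfaces.
public class SendFromStageSketch {

    interface Reader { String next(); long bytesRead(); }
    interface Writer { void write(String row); }

    static class ListReader implements Reader {
        private final List<String> rows;
        private int index;
        private long bytes;
        ListReader(List<String> rows) { this.rows = rows; }
        public String next() {
            if (index >= rows.size()) { return null; }
            String row = rows.get(index++);
            bytes += row.length();
            return row;
        }
        public long bytesRead() { return bytes; }
    }

    public static void main(String[] args) {
        Reader reader = new ListReader(List.of("insert,1", "insert,2", "update,1"));
        Writer writer = row -> System.out.println("sending: " + row);
        long recordCount = 0;
        // Pump every staged row to the outgoing writer, then report counts
        // the way the statistics block above does.
        for (String row = reader.next(); row != null; row = reader.next()) {
            writer.write(row);
            recordCount++;
        }
        System.out.println("records sent: " + recordCount + ", bytes read: " + reader.bytesRead());
    }
}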

Example 3 with ProcessInfoDataWriter

Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in the symmetric-ds project by JumpMind.

From the class DataExtractorService, the method extractOutgoingBatch:

protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter, OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        Node sourceNode = nodeService.findIdentity();
        TransformWriter transformExtractWriter = null;
        if (useStagingDataWriter) {
            long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes, nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager), processInfo));
        } else {
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(dataWriter, processInfo));
        }
        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0L;
        long byteCount = 0L;
        long transformTimeInMs = 0L;
        if (currentBatch.getStatus() == Status.IG) {
            Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(), symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(), currentBatch.isCommonFlag());
            batch.setIgnored(true);
            try {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.delete();
                }
                DataContext ctx = new DataContext(batch);
                ctx.put("targetNode", targetNode);
                ctx.put("sourceNode", sourceNode);
                transformExtractWriter.open(ctx);
                transformExtractWriter.start(batch);
                transformExtractWriter.end(batch, false);
            } finally {
                transformExtractWriter.close();
            }
        } else if (!isPreviouslyExtracted(currentBatch)) {
            int maxPermits = parameterService.getInt(ParameterConstants.CONCURRENT_WORKERS);
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId()) : currentBatch.getNodeBatchId();
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(maxPermits);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }
                synchronized (lock) {
                    if (!isPreviouslyExtracted(currentBatch)) {
                        currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                        if (updateBatchStatistics) {
                            changeBatchStatus(Status.QY, currentBatch, mode);
                        }
                        currentBatch.resetStats();
                        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(currentBatch, sourceNode, targetNode, processInfo));
                        DataContext ctx = new DataContext();
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
                        new DataProcessor(dataReader, transformExtractWriter, "extract").process(ctx);
                        extractTimeInMs = System.currentTimeMillis() - ts;
                        Statistics stats = transformExtractWriter.getNestedWriter().getStatistics().values().iterator().next();
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                lock.release();
                synchronized (locks) {
                    if (lock.availablePermits() == maxPermits) {
                        locks.remove(semaphoreKey);
                    }
                }
            }
        }
        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }
            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }
            // "re-queried"
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }
            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
                statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                statisticManager.incrementDataExtracted(currentBatch.getChannelId(), currentBatch.getExtractCount());
            }
        }
    }
    return currentBatch;
}
Also used : IDataReader(org.jumpmind.symmetric.io.data.IDataReader) Node(org.jumpmind.symmetric.model.Node) Semaphore(java.util.concurrent.Semaphore) DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor) Statistics(org.jumpmind.util.Statistics) DataContext(org.jumpmind.symmetric.io.data.DataContext) Batch(org.jumpmind.symmetric.io.data.Batch) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) StagingDataWriter(org.jumpmind.symmetric.io.data.writer.StagingDataWriter) TransformWriter(org.jumpmind.symmetric.io.data.writer.TransformWriter) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter) ExtractDataReader(org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
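
The extraction branch guards each batch with a semaphore looked up by key, acquires a permit before extracting, and removes the semaphore from the shared map once all permits are free again. A self-contained sketch of that keyed-semaphore pattern follows; KeyedSemaphoreSketch and its constants are illustrative only, and it acquires the permit outside the synchronized block rather than inside it as the method above does.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;

// Sketch of the keyed-semaphore pattern used above to bound concurrent extraction per batch key.
// The key and permit count are illustrative values, not SymmetricDS configuration.
public class KeyedSemaphoreSketch {

    private static final Map<String, Semaphore> LOCKS = new HashMap<>();
    private static final int MAX_PERMITS = 4; // stands in for ParameterConstants.CONCURRENT_WORKERS

    static void runWithLock(String key, Runnable work) throws InterruptedException {
        Semaphore lock;
        // Create or reuse the semaphore for this key.
        synchronized (LOCKS) {
            lock = LOCKS.computeIfAbsent(key, k -> new Semaphore(MAX_PERMITS));
        }
        lock.acquire();
        try {
            work.run();
        } finally {
            lock.release();
            // Drop the semaphore once all permits are free so the map does not grow without bound.
            synchronized (LOCKS) {
                if (lock.availablePermits() == MAX_PERMITS) {
                    LOCKS.remove(key);
                }
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        runWithLock("batch-42", () -> System.out.println("extracting batch 42"));
    }
}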

Example 4 with ProcessInfoDataWriter

Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in the symmetric-ds project by JumpMind.

From the class DataLoaderService, the method buildDataWriter:

protected IDataWriter buildDataWriter(ProcessInfo processInfo, String sourceNodeId, String channelId, long batchId, boolean isRetry) {
    TransformTable[] transforms = null;
    NodeGroupLink link = null;
    List<ResolvedData> resolvedDatas = new ArrayList<ResolvedData>();
    List<IDatabaseWriterFilter> filters = extensionService.getExtensionPointList(IDatabaseWriterFilter.class);
    List<IDatabaseWriterFilter> dynamicFilters = filters;
    List<IDatabaseWriterErrorHandler> errorHandlers = extensionService.getExtensionPointList(IDatabaseWriterErrorHandler.class);
    List<IDatabaseWriterErrorHandler> dynamicErrorHandlers = errorHandlers;
    if (sourceNodeId != null) {
        Node sourceNode = nodeService.findNode(sourceNodeId, true);
        if (sourceNode != null) {
            link = new NodeGroupLink(sourceNode.getNodeGroupId(), parameterService.getNodeGroupId());
        }
        Map<LoadFilterType, Map<String, List<LoadFilter>>> loadFilters = loadFilterService.findLoadFiltersFor(link, true);
        List<DynamicDatabaseWriterFilter> databaseWriterFilters = DynamicDatabaseWriterFilter.getDatabaseWriterFilters(engine, loadFilters);
        if (loadFilters != null && loadFilters.size() > 0) {
            dynamicFilters = new ArrayList<IDatabaseWriterFilter>(filters.size() + 1);
            dynamicFilters.addAll(filters);
            dynamicFilters.addAll(databaseWriterFilters);
            dynamicErrorHandlers = new ArrayList<IDatabaseWriterErrorHandler>(errorHandlers.size() + 1);
            dynamicErrorHandlers.addAll(errorHandlers);
            dynamicErrorHandlers.addAll(databaseWriterFilters);
        }
        List<TransformTableNodeGroupLink> transformsList = transformService.findTransformsFor(link, TransformPoint.LOAD);
        transforms = transformsList != null ? transformsList.toArray(new TransformTable[transformsList.size()]) : null;
        if (isRetry) {
            List<IncomingError> incomingErrors = getIncomingErrors(batchId, sourceNodeId);
            for (IncomingError incomingError : incomingErrors) {
                if (incomingError.isResolveIgnore() || StringUtils.isNotBlank(incomingError.getResolveData())) {
                    resolvedDatas.add(new ResolvedData(incomingError.getFailedRowNumber(), incomingError.getResolveData(), incomingError.isResolveIgnore()));
                }
            }
        }
    }
    TransformWriter transformWriter = new TransformWriter(platform, TransformPoint.LOAD, null, transformService.getColumnTransforms(), transforms);
    IDataWriter targetWriter = getFactory(channelId).getDataWriter(sourceNodeId, symmetricDialect, transformWriter, dynamicFilters, dynamicErrorHandlers, getConflictSettingsNodeGroupLinks(link, false), resolvedDatas);
    transformWriter.setNestedWriter(new ProcessInfoDataWriter(targetWriter, processInfo));
    return transformWriter;
}
Also used : ResolvedData(org.jumpmind.symmetric.io.data.writer.ResolvedData) Node(org.jumpmind.symmetric.model.Node) ArrayList(java.util.ArrayList) TransformTable(org.jumpmind.symmetric.io.data.transform.TransformTable) DynamicDatabaseWriterFilter(org.jumpmind.symmetric.load.DynamicDatabaseWriterFilter) IncomingError(org.jumpmind.symmetric.model.IncomingError) IDatabaseWriterErrorHandler(org.jumpmind.symmetric.io.data.writer.IDatabaseWriterErrorHandler) TransformTableNodeGroupLink(org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink) NodeGroupLink(org.jumpmind.symmetric.model.NodeGroupLink) IDatabaseWriterFilter(org.jumpmind.symmetric.io.data.writer.IDatabaseWriterFilter) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter) LoadFilterType(org.jumpmind.symmetric.model.LoadFilter.LoadFilterType) LoadFilter(org.jumpmind.symmetric.model.LoadFilter) TransformWriter(org.jumpmind.symmetric.io.data.writer.TransformWriter) ChannelMap(org.jumpmind.symmetric.model.ChannelMap) Map(java.util.Map) HashMap(java.util.HashMap) TransformTableNodeGroupLink(org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink) IDataWriter(org.jumpmind.symmetric.io.data.IDataWriter)
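
buildDataWriter merges the statically registered filters with filters generated from the load-filter configuration, then nests the factory-produced writer inside a ProcessInfoDataWriter via the TransformWriter. The sketch below shows only the list-combination step under a hypothetical Filter type; FilterChainSketch is not part of SymmetricDS.

import java.util.ArrayList;
import java.util.List;

// Sketch of combining statically registered filters with dynamically built ones before
// the writer chain is assembled. Filter is a stand-in, not IDatabaseWriterFilter.
public class FilterChainSketch {

    interface Filter { boolean beforeWrite(String row); }

    public static void main(String[] args) {
        Filter notBlank = row -> !row.isBlank();            // stands in for an extension-point filter
        Filter notSkipped = row -> !row.startsWith("skip");  // stands in for a filter built from load-filter config
        List<Filter> registered = List.of(notBlank);
        List<Filter> dynamic = List.of(notSkipped);

        // Same shape as the dynamicFilters list above: start from the registered filters, then append the dynamic ones.
        List<Filter> all = new ArrayList<>(registered.size() + dynamic.size());
        all.addAll(registered);
        all.addAll(dynamic);

        String row = "insert,42";
        boolean accepted = all.stream().allMatch(f -> f.beforeWrite(row));
        System.out.println(accepted ? "row passes all filters" : "row filtered out");
    }
}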

Example 5 with ProcessInfoDataWriter

Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in the symmetric-ds project by JumpMind.

From the class DataExtractorService, an overload of the method sendOutgoingBatch:

protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch, IDataWriter dataWriter, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        changeBatchStatus(Status.SE, currentBatch, mode);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(), extractedBatch);
            DataContext ctx = new DataContext();
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
            new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage").process(ctx);
            if (dataWriter.getStatistics().size() > 0) {
                Statistics stats = dataWriter.getStatistics().values().iterator().next();
                statisticManager.incrementDataSent(currentBatch.getChannelId(), stats.get(DataWriterStatisticConstants.STATEMENTCOUNT));
                long byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
            } else {
                log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s", currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
Also used : IDataReader(org.jumpmind.symmetric.io.data.IDataReader) DataContext(org.jumpmind.symmetric.io.data.DataContext) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) ProtocolDataReader(org.jumpmind.symmetric.io.data.reader.ProtocolDataReader) DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor) Statistics(org.jumpmind.util.Statistics) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter)
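
As in Example 2, the batch is replayed from staging; here the sent counters come from the writer's statistics rather than the reader's. The sketch below shows that guarded read-out of the first statistics entry, with a hypothetical Stats record standing in for org.jumpmind.util.Statistics.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the statistics read-out at the end of sendOutgoingBatch: use the first entry of the
// writer's statistics map if one exists, otherwise report that nothing was recorded.
// The Stats record is a hypothetical stand-in for org.jumpmind.util.Statistics.
public class StatsReadoutSketch {

    record Stats(long statementCount, long byteCount) { }

    public static void main(String[] args) {
        Map<String, Stats> statistics = new LinkedHashMap<>();
        statistics.put("001-42", new Stats(120, 8192));

        if (!statistics.isEmpty()) {
            Stats stats = statistics.values().iterator().next();
            System.out.println("data sent: " + stats.statementCount() + " rows, " + stats.byteCount() + " bytes");
        } else {
            System.out.println("no recorded statistics for this batch");
        }
    }
}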

Aggregations

ProcessInfoDataWriter (org.jumpmind.symmetric.model.ProcessInfoDataWriter): 6 usages
TransformWriter (org.jumpmind.symmetric.io.data.writer.TransformWriter): 4 usages
Node (org.jumpmind.symmetric.model.Node): 4 usages
DataContext (org.jumpmind.symmetric.io.data.DataContext): 3 usages
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 3 usages
IDataReader (org.jumpmind.symmetric.io.data.IDataReader): 3 usages
IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 3 usages
Statistics (org.jumpmind.util.Statistics): 3 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages
Map (java.util.Map): 2 usages
IDataWriter (org.jumpmind.symmetric.io.data.IDataWriter): 2 usages
ProtocolDataReader (org.jumpmind.symmetric.io.data.reader.ProtocolDataReader): 2 usages
TransformTable (org.jumpmind.symmetric.io.data.transform.TransformTable): 2 usages
IDatabaseWriterErrorHandler (org.jumpmind.symmetric.io.data.writer.IDatabaseWriterErrorHandler): 2 usages
IDatabaseWriterFilter (org.jumpmind.symmetric.io.data.writer.IDatabaseWriterFilter): 2 usages
ResolvedData (org.jumpmind.symmetric.io.data.writer.ResolvedData): 2 usages
StagingDataWriter (org.jumpmind.symmetric.io.data.writer.StagingDataWriter): 2 usages
DynamicDatabaseWriterFilter (org.jumpmind.symmetric.load.DynamicDatabaseWriterFilter): 2 usages
ChannelMap (org.jumpmind.symmetric.model.ChannelMap): 2 usages