Example 6 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in project symmetric-ds by JumpMind.

The class DataExtractorService, method extractOutgoingBatch:

protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter, OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        Node sourceNode = nodeService.findIdentity();
        TransformWriter transformExtractWriter = null;
        if (useStagingDataWriter) {
            long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes, nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager), processInfo));
        } else {
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(dataWriter, processInfo));
        }
        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0L;
        long byteCount = 0L;
        long transformTimeInMs = 0L;
        if (currentBatch.getStatus() == Status.IG) {
            Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(), symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(), currentBatch.isCommonFlag());
            batch.setIgnored(true);
            try {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.delete();
                }
                DataContext ctx = new DataContext(batch);
                ctx.put("targetNode", targetNode);
                ctx.put("sourceNode", sourceNode);
                transformExtractWriter.open(ctx);
                transformExtractWriter.start(batch);
                transformExtractWriter.end(batch, false);
            } finally {
                transformExtractWriter.close();
            }
        } else if (!isPreviouslyExtracted(currentBatch)) {
            int maxPermits = parameterService.getInt(ParameterConstants.CONCURRENT_WORKERS);
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId()) : currentBatch.getNodeBatchId();
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(maxPermits);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }
                synchronized (lock) {
                    if (!isPreviouslyExtracted(currentBatch)) {
                        currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                        if (updateBatchStatistics) {
                            changeBatchStatus(Status.QY, currentBatch, mode);
                        }
                        currentBatch.resetStats();
                        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(currentBatch, sourceNode, targetNode, processInfo));
                        DataContext ctx = new DataContext();
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
                        new DataProcessor(dataReader, transformExtractWriter, "extract").process(ctx);
                        extractTimeInMs = System.currentTimeMillis() - ts;
                        Statistics stats = transformExtractWriter.getNestedWriter().getStatistics().values().iterator().next();
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                lock.release();
                synchronized (locks) {
                    if (lock.availablePermits() == maxPermits) {
                        locks.remove(semaphoreKey);
                    }
                }
            }
        }
        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }
            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }
            // "re-queried"
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }
            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
                statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                statisticManager.incrementDataExtracted(currentBatch.getChannelId(), currentBatch.getExtractCount());
            }
        }
    }
    return currentBatch;
}
Also used : IDataReader(org.jumpmind.symmetric.io.data.IDataReader) Node(org.jumpmind.symmetric.model.Node) Semaphore(java.util.concurrent.Semaphore) DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor) Statistics(org.jumpmind.util.Statistics) DataContext(org.jumpmind.symmetric.io.data.DataContext) Batch(org.jumpmind.symmetric.io.data.Batch) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) StagingDataWriter(org.jumpmind.symmetric.io.data.writer.StagingDataWriter) TransformWriter(org.jumpmind.symmetric.io.data.writer.TransformWriter) ProcessInfoDataWriter(org.jumpmind.symmetric.model.ProcessInfoDataWriter) ExtractDataReader(org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
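The interesting part of extractOutgoingBatch is how it serializes concurrent extraction of the same batch: a map of Semaphores keyed by batch id (or node-batch id), sized by the CONCURRENT_WORKERS parameter, and discarded once all permits are free again. Below is a minimal standalone sketch of that pattern, assuming nothing from SymmetricDS; the class name KeyedSemaphoreGuard and the Runnable-based API are illustrative, and interruption handling is simplified to acquireUninterruptibly instead of wrapping InterruptedException.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;

// Illustrative only: a simplified version of the keyed-semaphore pattern used in
// extractOutgoingBatch to bound concurrent extraction work per batch key.
public class KeyedSemaphoreGuard {

    private final Map<String, Semaphore> locks = new HashMap<String, Semaphore>();
    private final int maxPermits;

    public KeyedSemaphoreGuard(int maxPermits) {
        this.maxPermits = maxPermits;
    }

    public void withPermit(String key, Runnable work) {
        Semaphore lock;
        synchronized (locks) {
            // get or create the semaphore for this key
            lock = locks.get(key);
            if (lock == null) {
                lock = new Semaphore(maxPermits);
                locks.put(key, lock);
            }
        }
        lock.acquireUninterruptibly();
        try {
            work.run();
        } finally {
            lock.release();
            synchronized (locks) {
                // drop the semaphore once every permit is free again,
                // mirroring the cleanup in the finally block above
                if (lock.availablePermits() == maxPermits) {
                    locks.remove(key);
                }
            }
        }
    }
}

A caller would wrap the staging work in withPermit(batchKey, work); the original method additionally re-checks isPreviouslyExtracted inside the lock so that only one thread actually stages a given batch.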

Example 7 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in project symmetric-ds by JumpMind.

The class PushUriHandler, method handle:

public void handle(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
    String nodeId = ServletUtils.getParameter(req, WebConstants.NODE_ID);
    String channelId = getChannelId(req);
    log.info("About to service push request for {}", nodeId);
    IStagingManager stagingManager = engine.getStagingManager();
    IDataLoaderService dataLoaderService = engine.getDataLoaderService();
    INodeService nodeService = engine.getNodeService();
    IStatisticManager statisticManager = engine.getStatisticManager();
    String identityNodeId = nodeService.findIdentityNodeId();
    ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(nodeId, identityNodeId, ProcessType.TRANSFER_FROM, channelId));
    BufferedReader reader = null;
    BufferedWriter writer = null;
    DataLoaderWorker worker = null;
    try {
        Node sourceNode = engine.getNodeService().findNode(nodeId);
        processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
        reader = new BufferedReader(new InputStreamReader(createInputStream(req)));
        long streamToFileThreshold = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        String line = reader.readLine();
        StringBuilder batchPrefix = new StringBuilder();
        Long batchId = null;
        while (line != null) {
            if (line.startsWith(CsvConstants.BATCH)) {
                batchId = getBatchId(line);
                IStagedResource resource = stagingManager.create(streamToFileThreshold, Constants.STAGING_CATEGORY_INCOMING, nodeId, batchId);
                writer = resource.getWriter();
                writer.write(batchPrefix.toString());
            } else if (line.startsWith(CsvConstants.COMMIT)) {
                writer.write(line);
                writer.close();
                writer = null;
                if (worker == null) {
                    worker = dataLoaderService.createDataLoaderWorker(ProcessType.LOAD_FROM_PUSH, channelId, sourceNode);
                }
                worker.queueUpLoad(new IncomingBatch(batchId, nodeId));
                batchId = null;
            }
            if (batchId == null) {
                batchPrefix.append(line).append("\n");
            } else if (writer != null) {
                writer.write(line);
                writer.write("\n");
            }
            line = reader.readLine();
        }
        processInfo.setStatus(ProcessInfo.Status.OK);
    } catch (RuntimeException ex) {
        processInfo.setStatus(ProcessInfo.Status.ERROR);
        throw ex;
    } finally {
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(writer);
    }
    PrintWriter resWriter = res.getWriter();
    if (worker != null) {
        worker.queueUpLoad(new DataLoaderService.EOM());
        while (!worker.isComplete()) {
            String status = "done";
            IncomingBatch batch = worker.waitForNextBatchToComplete();
            if (batch == null) {
                status = "in progress";
                batch = worker.getCurrentlyLoading();
            }
            if (batch != null && !(batch instanceof DataLoaderService.EOM)) {
                ArrayList<IncomingBatch> list = new ArrayList<IncomingBatch>(1);
                list.add(batch);
                log.info("sending {} ack ... for {}", status, batch);
                // TODO 13 support
                resWriter.write(engine.getTransportManager().getAcknowledgementData(false, identityNodeId, list));
                resWriter.write("\n");
                resWriter.flush();
            }
        }
    }
    res.flushBuffer();
    log.debug("Done servicing push request for {}", nodeId);
}
Also used : InputStreamReader(java.io.InputStreamReader) Node(org.jumpmind.symmetric.model.Node) ArrayList(java.util.ArrayList) ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) IncomingBatch(org.jumpmind.symmetric.model.IncomingBatch) BufferedWriter(java.io.BufferedWriter) IStatisticManager(org.jumpmind.symmetric.statistic.IStatisticManager) IStagingManager(org.jumpmind.symmetric.io.stage.IStagingManager) IDataLoaderService(org.jumpmind.symmetric.service.IDataLoaderService) INodeService(org.jumpmind.symmetric.service.INodeService) BufferedReader(java.io.BufferedReader) DataLoaderService(org.jumpmind.symmetric.service.impl.DataLoaderService) IDataLoaderService(org.jumpmind.symmetric.service.IDataLoaderService) DataLoaderWorker(org.jumpmind.symmetric.service.impl.DataLoaderService.DataLoaderWorker) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) PrintWriter(java.io.PrintWriter)
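The core of handle is the loop that splits the pushed protocol stream into per-batch staged resources: lines read while no batch is open are collected as a prefix and replayed at the start of every staged batch, and each batch is queued for loading as soon as its commit line is seen. The sketch below is a simplified, in-memory version of that splitting logic, assuming the literal markers "batch," and "commit," stand in for CsvConstants.BATCH and CsvConstants.COMMIT; the class name BatchSplitterSketch and its Map return type are hypothetical.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative only: a stripped-down, in-memory version of the batch-splitting
// loop in PushUriHandler.handle, staging each batch into a String instead of an
// IStagedResource.
public class BatchSplitterSketch {

    public static Map<Long, String> split(BufferedReader reader) throws IOException {
        Map<Long, String> staged = new LinkedHashMap<Long, String>();
        StringBuilder batchPrefix = new StringBuilder();
        StringWriter writer = null;
        Long batchId = null;
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.startsWith("batch,")) {
                // a new batch starts: open a "staged resource" and replay the header prefix
                batchId = Long.valueOf(line.substring("batch,".length()).trim());
                writer = new StringWriter();
                writer.write(batchPrefix.toString());
            } else if (line.startsWith("commit,") && writer != null) {
                // the batch is complete: keep it and reset for the next one
                writer.write(line + "\n");
                staged.put(batchId, writer.toString());
                writer = null;
                batchId = null;
                continue;
            }
            if (batchId == null) {
                batchPrefix.append(line).append("\n");
            } else {
                writer.write(line + "\n");
            }
        }
        return staged;
    }

    public static void main(String[] args) throws IOException {
        String payload = "nodeid, 001\nchannel, default\nbatch, 42\ninsert, ...\ncommit, 42\n";
        Map<Long, String> staged = split(new BufferedReader(new StringReader(payload)));
        System.out.println(staged.keySet()); // prints [42]
    }
}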

Example 8 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in project symmetric-ds by JumpMind.

The class StagingDataWriter, method print:

@Override
protected void print(Batch batch, String data) {
    if (log.isDebugEnabled() && data != null) {
        log.debug("Writing staging data: {}", FormatUtils.abbreviateForLogging(data));
    }
    IStagedResource resource = getStagedResource(batch);
    BufferedWriter writer = resource.getWriter();
    try {
        int size = data.length();
        for (int i = 0; i < size; i = i + 1024) {
            int end = i + 1024;
            writer.append(data, i, end < size ? end : size);
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    }
}
Also used : IoException(org.jumpmind.exception.IoException) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) IOException(java.io.IOException) BufferedWriter(java.io.BufferedWriter)
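print streams the batch payload to the staged resource in fixed 1024-character slices rather than in a single write call. A minimal sketch of that chunking, pulled out into a standalone helper; the class name ChunkedAppend and the output file name are illustrative, not part of SymmetricDS.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

// Illustrative only: the same 1 KB chunked-append idea as StagingDataWriter.print.
public final class ChunkedAppend {

    private static final int CHUNK_SIZE = 1024;

    public static void append(Writer writer, String data) throws IOException {
        int size = data.length();
        for (int i = 0; i < size; i += CHUNK_SIZE) {
            int end = Math.min(i + CHUNK_SIZE, size);
            // appends the slice [i, end) of data, so each write stays at most 1 KB
            writer.append(data, i, end);
        }
    }

    public static void main(String[] args) throws IOException {
        StringBuilder payload = new StringBuilder();
        for (int i = 0; i < 100000; i++) {
            payload.append("row,").append(i).append("\n");
        }
        try (BufferedWriter out = new BufferedWriter(new FileWriter("staging.csv"))) {
            append(out, payload.toString());
        }
    }
}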

Example 9 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in project symmetric-ds by JumpMind.

The class StagingDataWriter, method getStagedResource:

protected IStagedResource getStagedResource(Batch batch) {
    IStagedResource resource = stagedResources.get(batch);
    if (resource == null) {
        String location = batch.getStagedLocation();
        resource = stagingManager.find(category, location, batch.getBatchId());
        if (resource == null || resource.getState() == State.DONE) {
            log.debug("Creating staged resource for batch {}", batch.getNodeBatchId());
            resource = stagingManager.create(memoryThresholdInBytes, category, location, batch.getBatchId());
        }
        stagedResources.put(batch, resource);
    }
    return resource;
}
Also used : IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource)
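getStagedResource is a find-or-create lookup memoized per batch: reuse a cached resource if one exists, otherwise ask the staging manager to find one, and create a fresh resource when nothing was found or the found one is already DONE. The sketch below shows the same shape with the staging manager abstracted behind a hypothetical ResourceFactory interface; none of these names exist in SymmetricDS.

import java.util.HashMap;
import java.util.Map;

// Illustrative only: the find-or-create-then-cache shape of
// StagingDataWriter.getStagedResource, generalized over any key and value type.
interface ResourceFactory<K, V> {

    V find(K key);              // may return null, or a value that is already finished

    boolean isFinished(V value);

    V create(K key);            // builds a fresh value
}

class CachingLookup<K, V> {

    private final Map<K, V> cache = new HashMap<K, V>();

    private final ResourceFactory<K, V> factory;

    CachingLookup(ResourceFactory<K, V> factory) {
        this.factory = factory;
    }

    V get(K key) {
        V value = cache.get(key);
        if (value == null) {
            value = factory.find(key);
            // create a new value when nothing was found, or when the found one is
            // already finished, mirroring the State.DONE check above
            if (value == null || factory.isFinished(value)) {
                value = factory.create(key);
            }
            cache.put(key, value);
        }
        return value;
    }
}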

Example 10 with IStagedResource

Use of org.jumpmind.symmetric.io.stage.IStagedResource in project symmetric-ds by JumpMind.

The class DataExtractorService, method cleanupIgnoredBatch:

protected void cleanupIgnoredBatch(Node sourceNode, Node targetNode, OutgoingBatch currentBatch, IDataWriter writer) {
    Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(), symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(), currentBatch.isCommonFlag());
    batch.setIgnored(true);
    try {
        IStagedResource resource = getStagedResource(currentBatch);
        if (resource != null) {
            resource.delete();
        }
        DataContext ctx = new DataContext(batch);
        ctx.put("targetNode", targetNode);
        ctx.put("sourceNode", sourceNode);
        writer.open(ctx);
        writer.start(batch);
        writer.end(batch, false);
    } finally {
        writer.close();
    }
}
Also used : DataContext(org.jumpmind.symmetric.io.data.DataContext) Batch(org.jumpmind.symmetric.io.data.Batch) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource)
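cleanupIgnoredBatch mirrors the Status.IG branch of Example 6: any staged payload is deleted, and an empty, ignored batch is still pushed through the writer lifecycle (open, start, end with false, then close in a finally block) so the target receives something it can acknowledge. The sketch below only illustrates that call order with a hypothetical logging writer; it deliberately does not implement the real IDataWriter interface.

// Illustrative only: a hypothetical writer that logs its lifecycle calls, to make
// the open/start/end(false)/close sequence in cleanupIgnoredBatch visible.
interface LifecycleWriter {

    void open(String context);

    void start(long batchId);

    void end(long batchId, boolean hasData);

    void close();
}

class LoggingWriter implements LifecycleWriter {

    public void open(String context) {
        System.out.println("open " + context);
    }

    public void start(long batchId) {
        System.out.println("start batch " + batchId);
    }

    public void end(long batchId, boolean hasData) {
        System.out.println("end batch " + batchId + " hasData=" + hasData);
    }

    public void close() {
        System.out.println("close");
    }
}

class IgnoredBatchDemo {

    public static void main(String[] args) {
        LifecycleWriter writer = new LoggingWriter();
        try {
            // an ignored batch still produces a header-only batch for the target
            writer.open("batch 42 -> node 001");
            writer.start(42);
            writer.end(42, false);
        } finally {
            writer.close();
        }
    }
}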

Aggregations

IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 17 usages
OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch): 8 usages
IOException (java.io.IOException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
IoException (org.jumpmind.exception.IoException): 4 usages
DataContext (org.jumpmind.symmetric.io.data.DataContext): 4 usages
Node (org.jumpmind.symmetric.model.Node): 4 usages
SQLException (java.sql.SQLException): 3 usages
CancellationException (java.util.concurrent.CancellationException): 3 usages
Batch (org.jumpmind.symmetric.io.data.Batch): 3 usages
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 3 usages
IDataReader (org.jumpmind.symmetric.io.data.IDataReader): 3 usages
ProtocolException (org.jumpmind.symmetric.io.data.ProtocolException): 3 usages
IStagingManager (org.jumpmind.symmetric.io.stage.IStagingManager): 3 usages
Channel (org.jumpmind.symmetric.model.Channel): 3 usages
NodeChannel (org.jumpmind.symmetric.model.NodeChannel): 3 usages
ProcessInfoDataWriter (org.jumpmind.symmetric.model.ProcessInfoDataWriter): 3 usages
Statistics (org.jumpmind.util.Statistics): 3 usages
BufferedWriter (java.io.BufferedWriter): 2 usages
Calendar (java.util.Calendar): 2 usages