
Example 1 with ProcessInfoKey

Use of org.jumpmind.symmetric.model.ProcessInfoKey in the symmetric-ds project by JumpMind.

This example is from the class DataLoaderService, method loadDataFromPush.

/**
 * Load database from input stream and write acknowledgment to output
 * stream. This is used for a "push" request with a response of an
 * acknowledgment.
 */
public void loadDataFromPush(Node sourceNode, String channelId, InputStream in, OutputStream out) throws IOException {
    Node local = nodeService.findIdentity();
    if (local != null) {
        ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(sourceNode.getNodeId(), channelId, local.getNodeId(), ProcessInfoKey.ProcessType.PUSH_HANDLER));
        try {
            List<IncomingBatch> batchList = loadDataFromTransport(processInfo, sourceNode, new InternalIncomingTransport(in), out);
            logDataReceivedFromPush(sourceNode, batchList);
            NodeSecurity security = nodeService.findNodeSecurity(local.getNodeId());
            processInfo.setStatus(ProcessInfo.Status.ACKING);
            transportManager.writeAcknowledgement(out, sourceNode, batchList, local, security != null ? security.getNodePassword() : null);
            if (containsError(batchList)) {
                processInfo.setStatus(ProcessInfo.Status.ERROR);
            } else {
                processInfo.setStatus(ProcessInfo.Status.OK);
            }
        } catch (Exception e) {
            processInfo.setStatus(ProcessInfo.Status.ERROR);
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            } else if (e instanceof IOException) {
                throw (IOException) e;
            }
            throw new RuntimeException(e);
        }
    } else {
        throw new SymmetricException("Could not load data because the node is not registered");
    }
}
Also used : NodeSecurity(org.jumpmind.symmetric.model.NodeSecurity) Node(org.jumpmind.symmetric.model.Node) SymmetricException(org.jumpmind.symmetric.SymmetricException) InternalIncomingTransport(org.jumpmind.symmetric.transport.internal.InternalIncomingTransport) ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) IOException(java.io.IOException) IncomingBatch(org.jumpmind.symmetric.model.IncomingBatch) SQLException(java.sql.SQLException) ConnectException(java.net.ConnectException) ConflictException(org.jumpmind.symmetric.io.data.writer.ConflictException) RegistrationNotOpenException(org.jumpmind.symmetric.service.RegistrationNotOpenException) UnknownHostException(java.net.UnknownHostException) ExecutionException(java.util.concurrent.ExecutionException) ServiceUnavailableException(org.jumpmind.symmetric.transport.ServiceUnavailableException) UniqueKeyException(org.jumpmind.db.sql.UniqueKeyException) SqlException(org.jumpmind.db.sql.SqlException) TransportException(org.jumpmind.symmetric.transport.TransportException) RegistrationRequiredException(org.jumpmind.symmetric.service.RegistrationRequiredException) ConnectionRejectedException(org.jumpmind.symmetric.transport.ConnectionRejectedException) SyncDisabledException(org.jumpmind.symmetric.transport.SyncDisabledException) IoException(org.jumpmind.exception.IoException) MalformedURLException(java.net.MalformedURLException) AuthenticationException(org.jumpmind.symmetric.transport.AuthenticationException)
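
The shape of this method recurs throughout the examples below: build a ProcessInfoKey identifying the source node, channel, and target node, register it with the statistic manager, and move the resulting ProcessInfo through its statuses as the work progresses. The following is a minimal sketch of that lifecycle, using only the constructor, newProcessInfo call, and Status values visible in the example above; the Runnable stands in for whatever transport work is being monitored and is not part of the project's API.

import org.jumpmind.symmetric.model.ProcessInfo;
import org.jumpmind.symmetric.model.ProcessInfoKey;
import org.jumpmind.symmetric.statistic.IStatisticManager;

public class ProcessInfoLifecycleSketch {

    /**
     * Wraps an arbitrary unit of work in a ProcessInfo so that it shows up in
     * process monitoring, mirroring the try/catch shape of loadDataFromPush.
     */
    public void runMonitored(IStatisticManager statisticManager, String sourceNodeId,
            String channelId, String targetNodeId, Runnable work) {
        // the key records who is talking to whom, on which channel, and why
        ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(
                sourceNodeId, channelId, targetNodeId, ProcessInfoKey.ProcessType.PUSH_HANDLER));
        try {
            work.run();
            processInfo.setStatus(ProcessInfo.Status.OK);
        } catch (RuntimeException ex) {
            // mark the process as failed before letting the caller see the error
            processInfo.setStatus(ProcessInfo.Status.ERROR);
            throw ex;
        }
    }
}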

Example 2 with ProcessInfoKey

Use of org.jumpmind.symmetric.model.ProcessInfoKey in the symmetric-ds project by JumpMind.

This example is from the class DataGapFastDetector, method afterRouting.

/**
 * Always make sure sym_data_gap is up to date to make sure that we don't
 * dual route data.
 */
public void afterRouting() {
    ProcessInfo processInfo = this.statisticManager.newProcessInfo(new ProcessInfoKey(nodeService.findIdentityNodeId(), null, ProcessType.GAP_DETECT));
    processInfo.setStatus(Status.PROCESSING);
    long printStats = System.currentTimeMillis();
    long gapTimoutInMs = parameterService.getLong(ParameterConstants.ROUTING_STALE_DATA_ID_GAP_TIME);
    final int dataIdIncrementBy = parameterService.getInt(ParameterConstants.DATA_ID_INCREMENT_BY);
    Date currentDate = new Date(routingStartTime);
    boolean isBusyExpire = false;
    long lastBusyExpireRunTime = getLastBusyExpireRunTime();
    if (!isAllDataRead) {
        if (lastBusyExpireRunTime == 0) {
            setLastBusyExpireRunTime(System.currentTimeMillis());
        } else {
            long busyExpireMillis = parameterService.getLong(ParameterConstants.ROUTING_STALE_GAP_BUSY_EXPIRE_TIME);
            isBusyExpire = System.currentTimeMillis() - lastBusyExpireRunTime >= busyExpireMillis;
        }
    } else if (lastBusyExpireRunTime != 0) {
        setLastBusyExpireRunTime(0);
    }
    try {
        long ts = System.currentTimeMillis();
        long lastDataId = -1;
        int dataIdCount = 0;
        int rangeChecked = 0;
        int expireChecked = 0;
        gapsAll.addAll(gaps);
        Map<DataGap, List<Long>> dataIdMap = getDataIdMap();
        if (System.currentTimeMillis() - ts > 30000) {
            log.info("It took {}ms to map {} data IDs into {} gaps", new Object[] { System.currentTimeMillis() - ts, dataIds.size(), gaps.size() });
        }
        for (final DataGap dataGap : gaps) {
            final boolean lastGap = dataGap.equals(gaps.get(gaps.size() - 1));
            lastDataId = -1;
            List<Long> ids = dataIdMap.get(dataGap);
            dataIdCount += ids.size();
            rangeChecked += dataGap.getEndId() - dataGap.getStartId();
            // if we found data in the gap
            if (ids.size() > 0) {
                gapsDeleted.add(dataGap);
                gapsAll.remove(dataGap);
            // if we did not find data in the gap and it was not the last gap
            } else if (!lastGap && (isAllDataRead || isBusyExpire)) {
                Date createTime = dataGap.getCreateTime();
                boolean isExpired = false;
                if (supportsTransactionViews) {
                    isExpired = createTime != null && (createTime.getTime() < earliestTransactionTime || earliestTransactionTime == 0);
                } else {
                    isExpired = createTime != null && routingStartTime - createTime.getTime() > gapTimoutInMs;
                }
                if (isExpired) {
                    boolean isGapEmpty = false;
                    if (!isAllDataRead) {
                        isGapEmpty = dataService.countDataInRange(dataGap.getStartId() - 1, dataGap.getEndId() + 1) == 0;
                        expireChecked++;
                    }
                    if (isAllDataRead || isGapEmpty) {
                        if (log.isDebugEnabled()) {
                            if (dataGap.getStartId() == dataGap.getEndId()) {
                                log.debug("Found a gap in data_id at {}.  Skipping it because " + (supportsTransactionViews ? "there are no pending transactions" : "the gap expired"), dataGap.getStartId());
                            } else {
                                log.debug("Found a gap in data_id from {} to {}.  Skipping it because " + (supportsTransactionViews ? "there are no pending transactions" : "the gap expired"), dataGap.getStartId(), dataGap.getEndId());
                            }
                        }
                        gapsDeleted.add(dataGap);
                        gapsAll.remove(dataGap);
                    }
                }
            }
            for (Number number : ids) {
                long dataId = number.longValue();
                processInfo.incrementCurrentDataCount();
                if (lastDataId == -1 && dataGap.getStartId() + dataIdIncrementBy <= dataId) {
                    // there was a new gap at the start
                    addDataGap(new DataGap(dataGap.getStartId(), dataId - 1, currentDate));
                } else if (lastDataId != -1 && lastDataId + dataIdIncrementBy != dataId && lastDataId != dataId) {
                    // found a gap somewhere in the existing gap
                    addDataGap(new DataGap(lastDataId + 1, dataId - 1, currentDate));
                }
                lastDataId = dataId;
            }
            // if we found data in the gap but it stopped short of the end, keep the tail as a gap
            if (lastDataId != -1 && !lastGap && lastDataId + dataIdIncrementBy <= dataGap.getEndId()) {
                addDataGap(new DataGap(lastDataId + dataIdIncrementBy, dataGap.getEndId(), currentDate));
            }
            if (System.currentTimeMillis() - printStats > 30000) {
                log.info("The data gap detection has been running for {}ms, detected {} rows over a gap range of {}, " + "found {} new gaps, found old {} gaps, and checked data in {} gaps", new Object[] { System.currentTimeMillis() - ts, dataIdCount, rangeChecked, gapsAdded.size(), gapsDeleted.size(), expireChecked });
                printStats = System.currentTimeMillis();
            }
        }
        if (lastDataId != -1) {
            DataGap newGap = new DataGap(lastDataId + 1, lastDataId + maxDataToSelect, currentDate);
            if (addDataGap(newGap)) {
                log.debug("Inserting new last data gap: {}", newGap);
            }
        }
        printStats = saveDataGaps(ts, printStats);
        setFullGapAnalysis(false);
        if (isBusyExpire) {
            setLastBusyExpireRunTime(System.currentTimeMillis());
        }
        long updateTimeInMs = System.currentTimeMillis() - ts;
        if (updateTimeInMs > 10000) {
            log.info("Detecting gaps took {} ms", updateTimeInMs);
        }
        processInfo.setStatus(Status.OK);
    } catch (RuntimeException ex) {
        processInfo.setStatus(Status.ERROR);
        throw ex;
    }
}
Also used : ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) Date(java.util.Date) DataGap(org.jumpmind.symmetric.model.DataGap) ArrayList(java.util.ArrayList) List(java.util.List)
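
The heart of the loop above is the arithmetic that splits an existing gap around the data ids routing found inside it: a sub-gap survives before the first id, between any two non-consecutive ids, and after the last id. Below is a simplified, standalone sketch of just that splitting step. It uses the DataGap constructor and getters from the example, but the splitGap helper itself is hypothetical and ignores the expiration and busy-expire handling.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.jumpmind.symmetric.model.DataGap;

public class GapSplitSketch {

    /**
     * Given an existing gap and the sorted data ids found inside it, return
     * the sub-ranges that are still missing data and should remain gaps.
     */
    public static List<DataGap> splitGap(DataGap gap, List<Long> sortedDataIds,
            int dataIdIncrementBy, Date createTime) {
        List<DataGap> remaining = new ArrayList<DataGap>();
        long lastDataId = -1;
        for (long dataId : sortedDataIds) {
            if (lastDataId == -1 && gap.getStartId() + dataIdIncrementBy <= dataId) {
                // data starts after the beginning of the gap, so the head stays a gap
                remaining.add(new DataGap(gap.getStartId(), dataId - 1, createTime));
            } else if (lastDataId != -1 && lastDataId != dataId
                    && lastDataId + dataIdIncrementBy != dataId) {
                // a hole between two found ids stays a gap
                remaining.add(new DataGap(lastDataId + 1, dataId - 1, createTime));
            }
            lastDataId = dataId;
        }
        if (lastDataId != -1 && lastDataId + dataIdIncrementBy <= gap.getEndId()) {
            // data stops before the end of the gap, so the tail stays a gap
            remaining.add(new DataGap(lastDataId + dataIdIncrementBy, gap.getEndId(), createTime));
        }
        return remaining;
    }
}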

Example 3 with ProcessInfoKey

Use of org.jumpmind.symmetric.model.ProcessInfoKey in the symmetric-ds project by JumpMind.

This example is from the class OfflinePushService, method pushToNode.

private void pushToNode(Node remote, RemoteNodeStatus status) {
    Node identity = nodeService.findIdentity();
    FileOutgoingTransport transport = null;
    ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(identity.getNodeId(), status.getChannelId(), remote.getNodeId(), ProcessType.OFFLINE_PUSH));
    List<OutgoingBatch> extractedBatches = null;
    try {
        transport = (FileOutgoingTransport) transportManager.getPushTransport(remote, identity, null, null);
        extractedBatches = dataExtractorService.extract(processInfo, remote, status.getChannelId(), transport);
        if (extractedBatches.size() > 0) {
            log.info("Offline push data written for {} at {}", remote, transport.getOutgoingDir());
            List<BatchAck> batchAcks = readAcks(extractedBatches, transport, transportManager, acknowledgeService);
            status.updateOutgoingStatus(extractedBatches, batchAcks);
        }
        if (processInfo.getStatus() != Status.ERROR) {
            processInfo.setStatus(Status.OK);
        }
    } catch (Exception ex) {
        processInfo.setStatus(Status.ERROR);
        log.error("Failed to write offline file", ex);
    } finally {
        transport.close();
        transport.complete(processInfo.getStatus() == Status.OK);
    }
}
Also used : BatchAck(org.jumpmind.symmetric.model.BatchAck) FileOutgoingTransport(org.jumpmind.symmetric.transport.file.FileOutgoingTransport) Node(org.jumpmind.symmetric.model.Node) ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch)
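
One detail worth noting in this example is the finally block: the transport is closed and then told whether the run ended in success. Since transportManager.getPushTransport can throw before transport is assigned, a defensive variant would guard the cleanup against null. The sketch below is an editorial illustration of that idea, not code from the project; it only uses the close(), complete(), and getStatus() calls already visible above.

import org.jumpmind.symmetric.model.ProcessInfo;
import org.jumpmind.symmetric.transport.file.FileOutgoingTransport;

public class TransportCleanupSketch {

    /** Null-guarded version of the cleanup performed in pushToNode's finally block. */
    public void closeAndComplete(FileOutgoingTransport transport, ProcessInfo processInfo) {
        if (transport != null) {
            transport.close();
            // pass the transport a flag saying whether the offline push actually succeeded
            transport.complete(processInfo.getStatus() == ProcessInfo.Status.OK);
        }
    }
}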

Example 4 with ProcessInfoKey

Use of org.jumpmind.symmetric.model.ProcessInfoKey in the symmetric-ds project by JumpMind.

This example is from the class PushUriHandler, method handle.

public void handle(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
    String nodeId = ServletUtils.getParameter(req, WebConstants.NODE_ID);
    String channelId = getChannelId(req);
    log.info("About to service push request for {}", nodeId);
    IStagingManager stagingManager = engine.getStagingManager();
    IDataLoaderService dataLoaderService = engine.getDataLoaderService();
    INodeService nodeService = engine.getNodeService();
    IStatisticManager statisticManager = engine.getStatisticManager();
    String identityNodeId = nodeService.findIdentityNodeId();
    ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(nodeId, identityNodeId, ProcessType.TRANSFER_FROM, channelId));
    BufferedReader reader = null;
    BufferedWriter writer = null;
    DataLoaderWorker worker = null;
    try {
        Node sourceNode = engine.getNodeService().findNode(nodeId);
        processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
        reader = new BufferedReader(new InputStreamReader(createInputStream(req)));
        long streamToFileThreshold = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        String line = reader.readLine();
        StringBuilder batchPrefix = new StringBuilder();
        Long batchId = null;
        while (line != null) {
            if (line.startsWith(CsvConstants.BATCH)) {
                batchId = getBatchId(line);
                IStagedResource resource = stagingManager.create(streamToFileThreshold, Constants.STAGING_CATEGORY_INCOMING, nodeId, batchId);
                writer = resource.getWriter();
                writer.write(batchPrefix.toString());
            } else if (line.startsWith(CsvConstants.COMMIT)) {
                writer.write(line);
                writer.close();
                writer = null;
                if (worker == null) {
                    worker = dataLoaderService.createDataLoaderWorker(ProcessType.LOAD_FROM_PUSH, channelId, sourceNode);
                }
                worker.queueUpLoad(new IncomingBatch(batchId, nodeId));
                batchId = null;
            }
            if (batchId == null) {
                batchPrefix.append(line).append("\n");
            } else if (writer != null) {
                writer.write(line);
                writer.write("\n");
            }
            line = reader.readLine();
        }
        processInfo.setStatus(ProcessInfo.Status.OK);
    } catch (RuntimeException ex) {
        processInfo.setStatus(ProcessInfo.Status.ERROR);
        throw ex;
    } finally {
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(writer);
    }
    PrintWriter resWriter = res.getWriter();
    if (worker != null) {
        worker.queueUpLoad(new DataLoaderService.EOM());
        while (!worker.isComplete()) {
            String status = "done";
            IncomingBatch batch = worker.waitForNextBatchToComplete();
            if (batch == null) {
                status = "in progress";
                batch = worker.getCurrentlyLoading();
            }
            if (batch != null && !(batch instanceof DataLoaderService.EOM)) {
                ArrayList<IncomingBatch> list = new ArrayList<IncomingBatch>(1);
                list.add(batch);
                log.info("sending {} ack ... for {}", status, batch);
                // TODO 13 support
                resWriter.write(engine.getTransportManager().getAcknowledgementData(false, identityNodeId, list));
                resWriter.write("\n");
                resWriter.flush();
            }
        }
    }
    res.flushBuffer();
    log.debug("Done servicing push request for {}", nodeId);
}
Also used : InputStreamReader(java.io.InputStreamReader) Node(org.jumpmind.symmetric.model.Node) ArrayList(java.util.ArrayList) ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) IncomingBatch(org.jumpmind.symmetric.model.IncomingBatch) BufferedWriter(java.io.BufferedWriter) IStatisticManager(org.jumpmind.symmetric.statistic.IStatisticManager) IStagingManager(org.jumpmind.symmetric.io.stage.IStagingManager) IDataLoaderService(org.jumpmind.symmetric.service.IDataLoaderService) INodeService(org.jumpmind.symmetric.service.INodeService) BufferedReader(java.io.BufferedReader) DataLoaderService(org.jumpmind.symmetric.service.impl.DataLoaderService) DataLoaderWorker(org.jumpmind.symmetric.service.impl.DataLoaderService.DataLoaderWorker) IStagedResource(org.jumpmind.symmetric.io.stage.IStagedResource) PrintWriter(java.io.PrintWriter)
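
The handler above is essentially a splitter for the pushed payload: lines that arrive before the first batch marker are remembered as a shared prefix, and each batch/commit pair is written out as its own staged unit and queued for loading. Below is a self-contained sketch of that splitting loop. The "batch" and "commit" tokens are assumed values for CsvConstants.BATCH and CsvConstants.COMMIT, the inline id parsing stands in for the handler's getBatchId helper, and BatchSink is a hypothetical callback replacing the staging manager and DataLoaderWorker.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Writer;

public class PushPayloadSplitterSketch {

    /** Hypothetical callback standing in for staging a batch and queueing it on a worker. */
    public interface BatchSink {
        Writer openBatch(long batchId) throws IOException;
        void commitBatch(long batchId, Writer writer) throws IOException;
    }

    public void split(BufferedReader reader, BatchSink sink) throws IOException {
        StringBuilder prefix = new StringBuilder(); // protocol header shared by every batch
        Long batchId = null;
        Writer writer = null;
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            if (batchId == null && line.startsWith("batch")) {
                // start of a batch: open a staged writer and replay the shared header
                batchId = Long.parseLong(line.substring(line.indexOf(',') + 1).trim());
                writer = sink.openBatch(batchId);
                writer.write(prefix.toString());
                writer.write(line);
                writer.write("\n");
            } else if (batchId != null && line.startsWith("commit")) {
                // end of the batch: write the commit marker and hand the unit off
                writer.write(line);
                writer.write("\n");
                sink.commitBatch(batchId, writer);
                writer = null;
                batchId = null;
            } else if (batchId == null) {
                prefix.append(line).append("\n"); // still reading the shared header
            } else {
                writer.write(line);
                writer.write("\n");
            }
        }
    }
}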

Example 5 with ProcessInfoKey

Use of org.jumpmind.symmetric.model.ProcessInfoKey in the symmetric-ds project by JumpMind.

This example is from the class FileSyncPullUriHandler, method handle.

public void handle(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
    String nodeId = ServletUtils.getParameter(req, WebConstants.NODE_ID);
    if (StringUtils.isBlank(nodeId)) {
        ServletUtils.sendError(res, HttpServletResponse.SC_BAD_REQUEST, "Node must be specified");
        return;
    } else {
        log.debug("File sync pull request received from {}", nodeId);
    }
    IOutgoingTransport outgoingTransport = createOutgoingTransport(res.getOutputStream(), req.getHeader(WebConstants.HEADER_ACCEPT_CHARSET), engine.getConfigurationService().getSuspendIgnoreChannelLists(nodeId));
    ProcessInfo processInfo = engine.getStatisticManager().newProcessInfo(new ProcessInfoKey(engine.getNodeService().findIdentityNodeId(), nodeId, ProcessType.FILE_SYNC_PULL_HANDLER));
    try {
        res.setContentType("application/zip");
        res.addHeader("Content-Disposition", "attachment; filename=\"file-sync.zip\"");
        engine.getFileSyncService().sendFiles(processInfo, engine.getNodeService().findNode(nodeId), outgoingTransport);
        processInfo.setStatus(Status.OK);
    } catch (RuntimeException ex) {
        processInfo.setStatus(Status.ERROR);
        throw ex;
    } finally {
        if (outgoingTransport != null) {
            outgoingTransport.close();
        }
    }
}
Also used : ProcessInfoKey(org.jumpmind.symmetric.model.ProcessInfoKey) ProcessInfo(org.jumpmind.symmetric.model.ProcessInfo) IOutgoingTransport(org.jumpmind.symmetric.transport.IOutgoingTransport)
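
The handler pattern here is the same status wrapping again, this time around a servlet response that streams a zip attachment. A trimmed-down sketch of that response handling follows; the standard HttpServletResponse calls and the Status transitions come from the example above, while the PayloadWriter callback is a hypothetical stand-in for engine.getFileSyncService().sendFiles(...).

import java.io.IOException;
import java.io.OutputStream;

import javax.servlet.http.HttpServletResponse;

import org.jumpmind.symmetric.model.ProcessInfo;

public class ZipResponseSketch {

    /** Hypothetical callback standing in for the file sync extraction. */
    public interface PayloadWriter {
        void write(OutputStream out) throws IOException;
    }

    public void streamZip(HttpServletResponse res, ProcessInfo processInfo, PayloadWriter payload)
            throws IOException {
        // advertise the payload as a downloadable zip, as the handler above does
        res.setContentType("application/zip");
        res.addHeader("Content-Disposition", "attachment; filename=\"file-sync.zip\"");
        try {
            payload.write(res.getOutputStream());
            processInfo.setStatus(ProcessInfo.Status.OK);
        } catch (RuntimeException ex) {
            processInfo.setStatus(ProcessInfo.Status.ERROR);
            throw ex;
        }
    }
}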

Aggregations

ProcessInfoKey (org.jumpmind.symmetric.model.ProcessInfoKey): 26
ProcessInfo (org.jumpmind.symmetric.model.ProcessInfo): 25
Node (org.jumpmind.symmetric.model.Node): 15
IOException (java.io.IOException): 10
IncomingBatch (org.jumpmind.symmetric.model.IncomingBatch): 10
NodeSecurity (org.jumpmind.symmetric.model.NodeSecurity): 9
OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch): 7
ArrayList (java.util.ArrayList): 5
IoException (org.jumpmind.exception.IoException): 5
SymmetricException (org.jumpmind.symmetric.SymmetricException): 5
ChannelMap (org.jumpmind.symmetric.model.ChannelMap): 4
INodeService (org.jumpmind.symmetric.service.INodeService): 4
IOutgoingTransport (org.jumpmind.symmetric.transport.IOutgoingTransport): 4
MalformedURLException (java.net.MalformedURLException): 3
Date (java.util.Date): 3
ISymmetricEngine (org.jumpmind.symmetric.ISymmetricEngine): 3
BatchAck (org.jumpmind.symmetric.model.BatchAck): 3
NodeChannel (org.jumpmind.symmetric.model.NodeChannel): 3
RegistrationRequiredException (org.jumpmind.symmetric.service.RegistrationRequiredException): 3
InternalIncomingTransport (org.jumpmind.symmetric.transport.internal.InternalIncomingTransport): 3