Search in sources:

Example 1 with ExtractDataReader

Use of org.jumpmind.symmetric.io.data.reader.ExtractDataReader in project symmetric-ds by JumpMind.

From the class DataExtractorService, method extractBatchRange (by batch time range):

public boolean extractBatchRange(Writer writer, String nodeId, Date startBatchTime, Date endBatchTime, String... channelIds) {
    boolean foundBatch = false;
    Node sourceNode = nodeService.findIdentity();
    OutgoingBatches batches = outgoingBatchService.getOutgoingBatchRange(nodeId, startBatchTime, endBatchTime, channelIds);
    List<OutgoingBatch> list = batches.getBatches();
    for (OutgoingBatch outgoingBatch : list) {
        Node targetNode = nodeService.findNode(nodeId);
        if (targetNode == null && Constants.UNROUTED_NODE_ID.equals(nodeId)) {
            targetNode = new Node();
            targetNode.setNodeId("-1");
        }
        if (targetNode != null) {
            IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(outgoingBatch, sourceNode, targetNode, new ProcessInfo()));
            DataContext ctx = new DataContext();
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
            new DataProcessor(dataReader, createTransformDataWriter(nodeService.findIdentity(), targetNode, new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity())), "extract range").process(ctx);
            foundBatch = true;
        }
    }
    return foundBatch;
}
Also used: IDataReader (org.jumpmind.symmetric.io.data.IDataReader), DataContext (org.jumpmind.symmetric.io.data.DataContext), ProtocolDataWriter (org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter), Node (org.jumpmind.symmetric.model.Node), OutgoingBatches (org.jumpmind.symmetric.model.OutgoingBatches), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), ProcessInfo (org.jumpmind.symmetric.model.ProcessInfo), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
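
This method shows the wiring that repeats throughout DataExtractorService: wrap a SelectFromSymDataSource in an ExtractDataReader, pair it with a ProtocolDataWriter behind the transform writer, seed a DataContext with the source and target nodes, and let a DataProcessor pump the batch. The condensed sketch below isolates that pipeline; extractSingleBatch is a hypothetical helper assumed to live inside DataExtractorService so that nodeService, symmetricDialect and createTransformDataWriter are in scope.

// Hypothetical helper (not part of SymmetricDS) that condenses the pipeline used above.
// It assumes the injected services of DataExtractorService are available.
private void extractSingleBatch(OutgoingBatch batch, Node targetNode, Writer writer) {
    Node sourceNode = nodeService.findIdentity();
    // reader pulls the batch's captured rows through the SelectFromSymDataSource
    IDataReader reader = new ExtractDataReader(symmetricDialect.getPlatform(),
            new SelectFromSymDataSource(batch, sourceNode, targetNode, new ProcessInfo()));
    DataContext ctx = new DataContext();
    ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
    ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
    // writer side: protocol CSV wrapped by the service's transform writer, as in the example
    new DataProcessor(reader,
            createTransformDataWriter(sourceNode, targetNode,
                    new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer,
                            targetNode.requires13Compatiblity())),
            "extract sketch").process(ctx);
}

Compared with extractBatchRange above, the sketch drops the unrouted-node special case and the batch lookup loop; it only shows the reader/writer wiring.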

Example 2 with ExtractDataReader

Use of org.jumpmind.symmetric.io.data.reader.ExtractDataReader in project symmetric-ds by JumpMind.

From the class DataExtractorService, method extractOutgoingBatch:

protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter, OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        Node sourceNode = nodeService.findIdentity();
        TransformWriter transformExtractWriter = null;
        if (useStagingDataWriter) {
            long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes, nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager), processInfo));
        } else {
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode, new ProcessInfoDataWriter(dataWriter, processInfo));
        }
        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0L;
        long byteCount = 0L;
        long transformTimeInMs = 0L;
        if (currentBatch.getStatus() == Status.IG) {
            Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(), symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(), currentBatch.isCommonFlag());
            batch.setIgnored(true);
            try {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.delete();
                }
                DataContext ctx = new DataContext(batch);
                ctx.put("targetNode", targetNode);
                ctx.put("sourceNode", sourceNode);
                transformExtractWriter.open(ctx);
                transformExtractWriter.start(batch);
                transformExtractWriter.end(batch, false);
            } finally {
                transformExtractWriter.close();
            }
        } else if (!isPreviouslyExtracted(currentBatch)) {
            int maxPermits = parameterService.getInt(ParameterConstants.CONCURRENT_WORKERS);
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId()) : currentBatch.getNodeBatchId();
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(maxPermits);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }
                synchronized (lock) {
                    if (!isPreviouslyExtracted(currentBatch)) {
                        currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                        if (updateBatchStatistics) {
                            changeBatchStatus(Status.QY, currentBatch, mode);
                        }
                        currentBatch.resetStats();
                        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(currentBatch, sourceNode, targetNode, processInfo));
                        DataContext ctx = new DataContext();
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
                        new DataProcessor(dataReader, transformExtractWriter, "extract").process(ctx);
                        extractTimeInMs = System.currentTimeMillis() - ts;
                        Statistics stats = transformExtractWriter.getNestedWriter().getStatistics().values().iterator().next();
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                lock.release();
                synchronized (locks) {
                    if (lock.availablePermits() == maxPermits) {
                        locks.remove(semaphoreKey);
                    }
                }
            }
        }
        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }
            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }
            // "re-queried"
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }
            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
                statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                statisticManager.incrementDataExtracted(currentBatch.getChannelId(), currentBatch.getExtractCount());
            }
        }
    }
    return currentBatch;
}
Also used: IDataReader (org.jumpmind.symmetric.io.data.IDataReader), Node (org.jumpmind.symmetric.model.Node), Semaphore (java.util.concurrent.Semaphore), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), Statistics (org.jumpmind.util.Statistics), DataContext (org.jumpmind.symmetric.io.data.DataContext), Batch (org.jumpmind.symmetric.io.data.Batch), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource), StagingDataWriter (org.jumpmind.symmetric.io.data.writer.StagingDataWriter), TransformWriter (org.jumpmind.symmetric.io.data.writer.TransformWriter), ProcessInfoDataWriter (org.jumpmind.symmetric.model.ProcessInfoDataWriter), ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
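
Most of extractOutgoingBatch is staging and statistics bookkeeping, but the part worth a closer look is the keyed-semaphore section: a Semaphore is created lazily per key in the locks map, the number of concurrent extractors is capped by maxPermits (ParameterConstants.CONCURRENT_WORKERS), isPreviouslyExtracted is re-checked inside synchronized (lock) so only one thread actually re-extracts a batch, and the map entry is dropped once every permit is back. The sketch below isolates that pattern in plain Java; KeyedExtractLock and runExclusively are illustrative names, not SymmetricDS API, and the permit is acquired outside the map lock, which is a slight simplification of the code above.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;

// Illustrative sketch of the per-key semaphore pattern used by extractOutgoingBatch.
public class KeyedExtractLock {

    private final Map<String, Semaphore> locks = new HashMap<String, Semaphore>();

    public void runExclusively(String key, int maxPermits, Runnable work) throws InterruptedException {
        Semaphore lock;
        synchronized (locks) {
            lock = locks.get(key);
            if (lock == null) {
                // first worker for this key creates the shared semaphore
                lock = new Semaphore(maxPermits);
                locks.put(key, lock);
            }
        }
        lock.acquire();
        try {
            // mirrors the synchronized (lock) block above: one thread at a time
            // re-checks "previously extracted" and performs the actual extract
            synchronized (lock) {
                work.run();
            }
        } finally {
            lock.release();
            synchronized (locks) {
                // last worker out removes the entry so the map does not grow without bound
                if (lock.availablePermits() == maxPermits) {
                    locks.remove(key);
                }
            }
        }
    }
}

In the real method the key is either the batch id (when staging) or the node-batch id, and the RuntimeException handler additionally closes and deletes the staged resource before rethrowing.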

Example 3 with ExtractDataReader

Use of org.jumpmind.symmetric.io.data.reader.ExtractDataReader in project symmetric-ds by JumpMind.

From the class DataExtractorService, method extractConfigurationStandalone:

/**
 * Extract the SymmetricDS configuration for the passed in {@link Node}.
 */
public void extractConfigurationStandalone(Node targetNode, Writer writer, String... tablesToExclude) {
    Node sourceNode = nodeService.findIdentity();
    if (targetNode != null && sourceNode != null) {
        Batch batch = new Batch(BatchType.EXTRACT, Constants.VIRTUAL_BATCH_FOR_REGISTRATION, Constants.CHANNEL_CONFIG, symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), targetNode.getNodeId(), false);
        NodeGroupLink nodeGroupLink = new NodeGroupLink(parameterService.getNodeGroupId(), targetNode.getNodeGroupId());
        List<TriggerRouter> triggerRouters = triggerRouterService.buildTriggerRoutersForSymmetricTables(StringUtils.isBlank(targetNode.getSymmetricVersion()) ? Version.version() : targetNode.getSymmetricVersion(), nodeGroupLink, tablesToExclude);
        List<SelectFromTableEvent> initialLoadEvents = new ArrayList<SelectFromTableEvent>(triggerRouters.size() * 2);
        boolean pre37 = Version.isOlderThanVersion(targetNode.getSymmetricVersion(), "3.7.0");
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (!(pre37 && triggerRouter.getTrigger().getSourceTableName().toLowerCase().contains("extension"))) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, triggerRouter.getTrigger().getSourceTableName());
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        Table table = symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
                        if (table == null) {
                            throw new IllegalStateException("Could not find a required table: " + triggerRouter.getTrigger().getSourceTableName());
                        }
                        triggerHistory = new TriggerHistory(table, triggerRouter.getTrigger(), symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    StringBuilder sql = new StringBuilder(symmetricDialect.createPurgeSqlFor(targetNode, triggerRouter, triggerHistory));
                    addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
                    String sourceTable = triggerHistory.getSourceTableName();
                    Data data = new Data(1, null, sql.toString(), DataEventType.SQL, sourceTable, null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                    data.putAttribute(Data.ATTRIBUTE_ROUTER_ID, triggerRouter.getRouter().getRouterId());
                    initialLoadEvents.add(new SelectFromTableEvent(data));
                }
            }
        }
        for (int i = 0; i < triggerRouters.size(); i++) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (!(pre37 && triggerRouter.getTrigger().getSourceTableName().toLowerCase().contains("extension"))) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, null);
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        triggerHistory = new TriggerHistory(symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false), trigger, symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    Table table = symmetricDialect.getPlatform().getTableFromCache(triggerHistory.getSourceCatalogName(), triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
                    String initialLoadSql = "1=1 order by ";
                    String quote = symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken();
                    Column[] pkColumns = table.getPrimaryKeyColumns();
                    for (int j = 0; j < pkColumns.length; j++) {
                        if (j > 0) {
                            initialLoadSql += ", ";
                        }
                        initialLoadSql += quote + pkColumns[j].getName() + quote;
                    }
                    if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
                        initialLoadEvents.add(new SelectFromTableEvent(targetNode, triggerRouter, triggerHistory, initialLoadSql));
                    } else {
                        Data data = new Data(1, null, targetNode.getNodeId(), DataEventType.INSERT, triggerHistory.getSourceTableName(), null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                        initialLoadEvents.add(new SelectFromTableEvent(data));
                    }
                }
            }
        }
        SelectFromTableSource source = new SelectFromTableSource(batch, initialLoadEvents);
        ExtractDataReader dataReader = new ExtractDataReader(this.symmetricDialect.getPlatform(), source);
        ProtocolDataWriter dataWriter = new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity());
        DataProcessor processor = new DataProcessor(dataReader, dataWriter, "configuration extract");
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        processor.process(ctx);
        if (triggerRouters.size() == 0) {
            log.error("{} attempted registration, but was sent an empty configuration", targetNode);
        }
    }
}
Also used: TransformTable (org.jumpmind.symmetric.io.data.transform.TransformTable), Table (org.jumpmind.db.model.Table), Node (org.jumpmind.symmetric.model.Node), ArrayList (java.util.ArrayList), Data (org.jumpmind.symmetric.model.Data), DataMetaData (org.jumpmind.symmetric.model.DataMetaData), CsvData (org.jumpmind.symmetric.io.data.CsvData), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), TransformPoint (org.jumpmind.symmetric.io.data.transform.TransformPoint), DataContext (org.jumpmind.symmetric.io.data.DataContext), Trigger (org.jumpmind.symmetric.model.Trigger), ProtocolDataWriter (org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter), Batch (org.jumpmind.symmetric.io.data.Batch), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), Column (org.jumpmind.db.model.Column), PlatformColumn (org.jumpmind.db.model.PlatformColumn), TriggerHistory (org.jumpmind.symmetric.model.TriggerHistory), TriggerRouter (org.jumpmind.symmetric.model.TriggerRouter), TransformTableNodeGroupLink (org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink), NodeGroupLink (org.jumpmind.symmetric.model.NodeGroupLink), ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
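
A small detail worth pulling out of this example is how the initial-load selector is built: the clause starts as "1=1 order by " and each primary-key column of the cached Table is appended, wrapped in the platform's identifier delimiter token. The sketch below isolates that string building; buildInitialLoadSql is an illustrative helper rather than a SymmetricDS method, and the quote parameter stands in for symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken().

import org.jumpmind.db.model.Column;
import org.jumpmind.db.model.Table;

// Illustrative helper: rebuilds the "1=1 order by <pk1>, <pk2>" clause from the example,
// using a StringBuilder instead of repeated String concatenation.
public final class InitialLoadSqlSketch {

    static String buildInitialLoadSql(Table table, String quote) {
        StringBuilder sql = new StringBuilder("1=1 order by ");
        Column[] pkColumns = table.getPrimaryKeyColumns();
        for (int j = 0; j < pkColumns.length; j++) {
            if (j > 0) {
                sql.append(", ");
            }
            sql.append(quote).append(pkColumns[j].getName()).append(quote);
        }
        return sql.toString();
    }
}

The sym_node_identity table is handled separately in the method above: instead of a select event with this clause, it is sent as a single INSERT containing the target's node id.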

Example 4 with ExtractDataReader

Use of org.jumpmind.symmetric.io.data.reader.ExtractDataReader in project symmetric-ds by JumpMind.

From the class DataExtractorService, method extractBatchRange (by batch id range):

public boolean extractBatchRange(Writer writer, String nodeId, long startBatchId, long endBatchId) {
    boolean foundBatch = false;
    Node sourceNode = nodeService.findIdentity();
    for (long batchId = startBatchId; batchId <= endBatchId; batchId++) {
        OutgoingBatch batch = outgoingBatchService.findOutgoingBatch(batchId, nodeId);
        if (batch != null) {
            Node targetNode = nodeService.findNode(nodeId);
            if (targetNode == null && Constants.UNROUTED_NODE_ID.equals(nodeId)) {
                targetNode = new Node();
                targetNode.setNodeId("-1");
            }
            if (targetNode != null) {
                IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(batch, sourceNode, targetNode, new ProcessInfo()));
                DataContext ctx = new DataContext();
                ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
                new DataProcessor(dataReader, createTransformDataWriter(nodeService.findIdentity(), targetNode, new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity())), "extract range").process(ctx);
                foundBatch = true;
            }
        }
    }
    return foundBatch;
}
Also used: IDataReader (org.jumpmind.symmetric.io.data.IDataReader), DataContext (org.jumpmind.symmetric.io.data.DataContext), ProtocolDataWriter (org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter), Node (org.jumpmind.symmetric.model.Node), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), ProcessInfo (org.jumpmind.symmetric.model.ProcessInfo), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
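
This overload drives the same reader and processor wiring as Example 1, but it walks a numeric batch id range and looks each batch up with outgoingBatchService.findOutgoingBatch instead of querying by time window and channel. A hedged sketch of calling it to dump a range of batches to a file follows; the dataExtractorService reference, the import path, the "corp" node id, the batch ids and the file name are illustrative assumptions rather than values taken from the project.

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

import org.jumpmind.symmetric.service.impl.DataExtractorService;

// Sketch only: the service is assumed to come from a running SymmetricDS engine,
// and the node id, batch range and file name are made up for illustration.
public class ExtractBatchRangeSketch {

    public static void dumpBatches(DataExtractorService dataExtractorService) {
        try (Writer out = new FileWriter("extracted-batches-corp.csv")) {
            boolean found = dataExtractorService.extractBatchRange(out, "corp", 1L, 50L);
            if (!found) {
                System.out.println("no outgoing batches found for node corp in batches 1 through 50");
            }
        } catch (IOException e) {
            throw new RuntimeException("failed to extract batch range", e);
        }
    }
}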

Aggregations

DataContext (org.jumpmind.symmetric.io.data.DataContext): 4 uses
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 4 uses
ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader): 4 uses
Node (org.jumpmind.symmetric.model.Node): 4 uses
OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch): 4 uses
IDataReader (org.jumpmind.symmetric.io.data.IDataReader): 3 uses
ProtocolDataWriter (org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter): 3 uses
Batch (org.jumpmind.symmetric.io.data.Batch): 2 uses
ProcessInfo (org.jumpmind.symmetric.model.ProcessInfo): 2 uses
ArrayList (java.util.ArrayList): 1 use
Semaphore (java.util.concurrent.Semaphore): 1 use
Column (org.jumpmind.db.model.Column): 1 use
PlatformColumn (org.jumpmind.db.model.PlatformColumn): 1 use
Table (org.jumpmind.db.model.Table): 1 use
CsvData (org.jumpmind.symmetric.io.data.CsvData): 1 use
TransformPoint (org.jumpmind.symmetric.io.data.transform.TransformPoint): 1 use
TransformTable (org.jumpmind.symmetric.io.data.transform.TransformTable): 1 use
StagingDataWriter (org.jumpmind.symmetric.io.data.writer.StagingDataWriter): 1 use
TransformWriter (org.jumpmind.symmetric.io.data.writer.TransformWriter): 1 use
IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 1 use