Example 6 with Batch

Use of org.jumpmind.symmetric.io.data.Batch in project symmetric-ds by JumpMind.

From the class ProtocolDataReaderTest, the method testTableContextSwitch:

@Test
public void testTableContextSwitch() {
    String nodeId = "1";
    long batchId = 1;
    String channelId = "test";
    StringBuilder builder = beginCsv(nodeId);
    beginBatch(builder, batchId, channelId);
    putTableN(builder, 1, true);
    putInsert(builder, 4);
    putTableN(builder, 2, true);
    putInsert(builder, 4);
    putTableN(builder, 1, false);
    putInsert(builder, 2);
    putTableN(builder, 2, false);
    putInsert(builder, 2);
    endCsv(builder);
    ProtocolDataReader reader = new ProtocolDataReader(BatchType.LOAD, "test", builder);
    DataContext ctx = new DataContext(reader);
    reader.open(ctx);
    Batch batch = reader.nextBatch();
    assertNotNull(batch);
    Table table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test1", table.getName());
    int dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(4, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test2", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(4, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test1", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(2, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test2", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(2, dataCount);
}
Also used: DataContext (org.jumpmind.symmetric.io.data.DataContext), Table (org.jumpmind.db.model.Table), Batch (org.jumpmind.symmetric.io.data.Batch), Test (org.junit.Test)
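The helper methods beginCsv, beginBatch, putTableN, putInsert, and endCsv are not shown in this excerpt. As a rough sketch under that caveat, they presumably assemble SymmetricDS CSV protocol text in the builder, along these lines (token names follow CsvConstants; the column names and row values here are illustrative assumptions consistent with the assertions above):

StringBuilder builder = new StringBuilder();
// stream header, written once by beginCsv/beginBatch
builder.append("nodeid, 1\n");
builder.append("binary, BASE64\n");
builder.append("channel, test\n");
builder.append("batch, 1\n");
// putTableN(builder, 1, true): first appearance sends full table metadata
builder.append("table, test1\n");
builder.append("keys, id\n");
builder.append("columns, id, text\n");
builder.append("insert, \"1\", \"one\"\n"); // putInsert adds rows like this
// ... three more inserts, then the same pattern for test2 ...
// putTableN(builder, 1, false): switching back sends only the table token,
// so the reader must reuse the metadata it cached on the first pass
builder.append("table, test1\n");
builder.append("insert, \"5\", \"five\"\n");
// ... remaining rows, then endCsv writes the commit token
builder.append("commit, 1\n");

This is exactly the context switch the test verifies: table metadata travels once, and later table tokens only restore context for the cached definition.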

Example 7 with Batch

Use of org.jumpmind.symmetric.io.data.Batch in project symmetric-ds by JumpMind.

From the class DataExtractorService, the method extractConfigurationStandalone:

/**
 * Extract the SymmetricDS configuration for the passed in {@link Node}.
 */
public void extractConfigurationStandalone(Node targetNode, Writer writer, String... tablesToExclude) {
    Node sourceNode = nodeService.findIdentity();
    if (targetNode != null && sourceNode != null) {
        Batch batch = new Batch(BatchType.EXTRACT, Constants.VIRTUAL_BATCH_FOR_REGISTRATION, Constants.CHANNEL_CONFIG, symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), targetNode.getNodeId(), false);
        NodeGroupLink nodeGroupLink = new NodeGroupLink(parameterService.getNodeGroupId(), targetNode.getNodeGroupId());
        List<TriggerRouter> triggerRouters = triggerRouterService.buildTriggerRoutersForSymmetricTables(StringUtils.isBlank(targetNode.getSymmetricVersion()) ? Version.version() : targetNode.getSymmetricVersion(), nodeGroupLink, tablesToExclude);
        List<SelectFromTableEvent> initialLoadEvents = new ArrayList<SelectFromTableEvent>(triggerRouters.size() * 2);
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, triggerRouter.getTrigger().getSourceTableName());
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        Table table = symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
                        if (table == null) {
                            throw new IllegalStateException("Could not find a required table: " + triggerRouter.getTrigger().getSourceTableName());
                        }
                        triggerHistory = new TriggerHistory(table, triggerRouter.getTrigger(), symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    StringBuilder sql = new StringBuilder(symmetricDialect.createPurgeSqlFor(targetNode, triggerRouter, triggerHistory));
                    addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
                    String sourceTable = triggerHistory.getSourceTableName();
                    Data data = new Data(1, null, sql.toString(), DataEventType.SQL, sourceTable, null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                    data.putAttribute(Data.ATTRIBUTE_ROUTER_ID, triggerRouter.getRouter().getRouterId());
                    initialLoadEvents.add(new SelectFromTableEvent(data));
                }
            }
        }
        for (int i = 0; i < triggerRouters.size(); i++) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, null);
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        triggerHistory = new TriggerHistory(symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false), trigger, symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    Table table = symmetricDialect.getPlatform().getTableFromCache(triggerHistory.getSourceCatalogName(), triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
                    String initialLoadSql = "1=1 order by ";
                    String quote = symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken();
                    Column[] pkColumns = table.getPrimaryKeyColumns();
                    for (int j = 0; j < pkColumns.length; j++) {
                        if (j > 0) {
                            initialLoadSql += ", ";
                        }
                        initialLoadSql += quote + pkColumns[j].getName() + quote;
                    }
                    if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
                        initialLoadEvents.add(new SelectFromTableEvent(targetNode, triggerRouter, triggerHistory, initialLoadSql));
                    } else {
                        Data data = new Data(1, null, targetNode.getNodeId(), DataEventType.INSERT, triggerHistory.getSourceTableName(), null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                        initialLoadEvents.add(new SelectFromTableEvent(data));
                    }
                }
            }
        }
        SelectFromTableSource source = new SelectFromTableSource(batch, initialLoadEvents);
        ExtractDataReader dataReader = new ExtractDataReader(this.symmetricDialect.getPlatform(), source);
        ProtocolDataWriter dataWriter = new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity());
        DataProcessor processor = new DataProcessor(dataReader, dataWriter, "configuration extract");
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        processor.process(ctx);
        if (triggerRouters.size() == 0) {
            log.error("{} attempted registration, but was sent an empty configuration", targetNode);
        }
    }
}
Also used: TransformTable (org.jumpmind.symmetric.io.data.transform.TransformTable), Table (org.jumpmind.db.model.Table), Node (org.jumpmind.symmetric.model.Node), ArrayList (java.util.ArrayList), Data (org.jumpmind.symmetric.model.Data), DataMetaData (org.jumpmind.symmetric.model.DataMetaData), CsvData (org.jumpmind.symmetric.io.data.CsvData), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), TransformPoint (org.jumpmind.symmetric.io.data.transform.TransformPoint), DataContext (org.jumpmind.symmetric.io.data.DataContext), Trigger (org.jumpmind.symmetric.model.Trigger), ProtocolDataWriter (org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter), Batch (org.jumpmind.symmetric.io.data.Batch), OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch), Column (org.jumpmind.db.model.Column), PlatformColumn (org.jumpmind.db.model.PlatformColumn), TriggerHistory (org.jumpmind.symmetric.model.TriggerHistory), TriggerRouter (org.jumpmind.symmetric.model.TriggerRouter), TransformTableNodeGroupLink (org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink), NodeGroupLink (org.jumpmind.symmetric.model.NodeGroupLink), ExtractDataReader (org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
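A minimal caller sketch for orientation; dataExtractorService and targetNode are hypothetical handles obtained from a running engine, not part of the excerpt:

// Capture the registration configuration for a node as CSV protocol text.
java.io.StringWriter writer = new java.io.StringWriter();
dataExtractorService.extractConfigurationStandalone(targetNode, writer);
String configCsv = writer.toString(); // payload that would be streamed to the node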

Example 8 with Batch

Use of org.jumpmind.symmetric.io.data.Batch in project symmetric-ds by JumpMind.

From the class DataLoaderService, the method loadDataFromTransport:

/**
 * Load the database from an input stream and return a list of batch statuses.
 * This is used for a pull request that responds with data; the acknowledgment
 * is sent later.
 */
protected List<IncomingBatch> loadDataFromTransport(final ProcessInfo processInfo, final Node sourceNode, IIncomingTransport transport, OutputStream out) throws IOException {
    final ManageIncomingBatchListener listener = new ManageIncomingBatchListener();
    final DataContext ctx = new DataContext();
    Throwable error = null;
    try {
        Node targetNode = nodeService.findIdentity();
        ctx.put(Constants.DATA_CONTEXT_ENGINE, engine);
        if (targetNode != null) {
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
        }
        if (sourceNode != null) {
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
        }
        for (ILoadSyncLifecycleListener l : extensionService.getExtensionPointList(ILoadSyncLifecycleListener.class)) {
            l.syncStarted(ctx);
        }
        long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        String targetNodeId = nodeService.findIdentityNodeId();
        if (parameterService.is(ParameterConstants.STREAM_TO_FILE_ENABLED)) {
            processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
            ExecutorService executor = Executors.newFixedThreadPool(1, new CustomizableThreadFactory(String.format("dataloader-%s-%s", sourceNode.getNodeGroupId(), sourceNode.getNodeId())));
            LoadIntoDatabaseOnArrivalListener loadListener = new LoadIntoDatabaseOnArrivalListener(processInfo, sourceNode.getNodeId(), listener, executor);
            new SimpleStagingDataWriter(transport.openReader(), stagingManager, Constants.STAGING_CATEGORY_INCOMING, memoryThresholdInBytes, BatchType.LOAD, targetNodeId, ctx, loadListener).process();
            /* Previously submitted tasks will still be executed */
            executor.shutdown();
            OutputStreamWriter outWriter = null;
            if (out != null) {
                outWriter = new OutputStreamWriter(out, IoConstants.ENCODING);
                long keepAliveMillis = parameterService.getLong(ParameterConstants.DATA_LOADER_SEND_ACK_KEEPALIVE);
                while (!executor.awaitTermination(keepAliveMillis, TimeUnit.MILLISECONDS)) {
                    outWriter.write("1=1&");
                    outWriter.flush();
                }
            } else {
                executor.awaitTermination(12, TimeUnit.HOURS);
            }
            loadListener.isDone();
        } else {
            DataProcessor processor = new DataProcessor(new ProtocolDataReader(BatchType.LOAD, targetNodeId, transport.openReader()), null, listener, "data load") {

                @Override
                protected IDataWriter chooseDataWriter(Batch batch) {
                    return buildDataWriter(processInfo, sourceNode.getNodeId(), batch.getChannelId(), batch.getBatchId(), ((ManageIncomingBatchListener) listener).getCurrentBatch().isRetry());
                }
            };
            processor.process(ctx);
        }
    } catch (Throwable ex) {
        error = ex;
        logAndRethrow(ex);
    } finally {
        transport.close();
        for (ILoadSyncLifecycleListener l : extensionService.getExtensionPointList(ILoadSyncLifecycleListener.class)) {
            l.syncEnded(ctx, listener.getBatchesProcessed(), error);
        }
    }
    return listener.getBatchesProcessed();
}
Also used: CustomizableThreadFactory (org.jumpmind.util.CustomizableThreadFactory), ILoadSyncLifecycleListener (org.jumpmind.symmetric.load.ILoadSyncLifecycleListener), Node (org.jumpmind.symmetric.model.Node), SimpleStagingDataWriter (org.jumpmind.symmetric.io.data.writer.SimpleStagingDataWriter), DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor), DataContext (org.jumpmind.symmetric.io.data.DataContext), IncomingBatch (org.jumpmind.symmetric.model.IncomingBatch), Batch (org.jumpmind.symmetric.io.data.Batch), ExecutorService (java.util.concurrent.ExecutorService), OutputStreamWriter (java.io.OutputStreamWriter), ProtocolDataReader (org.jumpmind.symmetric.io.data.reader.ProtocolDataReader)
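One detail above is easy to miss: while staged batches load on the single-threaded executor, the method writes a harmless "1=1&" token to the acknowledgment stream so the HTTP connection is not closed as idle. Isolated, with loadTask, keepAliveMillis, and outWriter as assumed stand-ins, the pattern is:

ExecutorService executor = Executors.newFixedThreadPool(1);
executor.submit(loadTask); // stand-in for the arrival listener's load work
executor.shutdown(); // previously submitted tasks still run to completion
// InterruptedException handling omitted; the real method catches Throwable
while (!executor.awaitTermination(keepAliveMillis, TimeUnit.MILLISECONDS)) {
    outWriter.write("1=1&"); // benign token that keeps the response alive
    outWriter.flush();
}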

Example 9 with Batch

Use of org.jumpmind.symmetric.io.data.Batch in project symmetric-ds by JumpMind.

From the class AbstractWriterTest, the method writeData:

protected long writeData(IDataWriter writer, TableCsvData... datas) {
    this.lastDataWriterUsed = writer;
    DataContext context = new DataContext();
    writer.open(context);
    try {
        for (TableCsvData tableCsvData : datas) {
            Batch batch = new Batch(BatchType.LOAD, getNextBatchId(), "default", BinaryEncoding.BASE64, "00000", "00001", false);
            try {
                writer.start(batch);
                if (writer.start(tableCsvData.table)) {
                    for (CsvData d : tableCsvData.data) {
                        writer.write(d);
                    }
                    writer.end(tableCsvData.table);
                }
                writer.end(batch, false);
            } catch (IgnoreBatchException ex) {
                writer.end(batch, false);
            } catch (Exception ex) {
                writer.end(batch, true);
                if (!isErrorExpected()) {
                    if (ex instanceof RuntimeException) {
                        throw (RuntimeException) ex;
                    } else {
                        throw new RuntimeException(ex);
                    }
                }
            }
        }
    } finally {
        writer.close();
    }
    long statementCount = 0;
    Collection<Statistics> stats = writer.getStatistics().values();
    for (Statistics statistics : stats) {
        statementCount += statistics.get(DataWriterStatisticConstants.STATEMENTCOUNT);
    }
    return statementCount;
}
Also used: DataContext (org.jumpmind.symmetric.io.data.DataContext), Batch (org.jumpmind.symmetric.io.data.Batch), IgnoreBatchException (org.jumpmind.symmetric.io.data.writer.IgnoreBatchException), Statistics (org.jumpmind.util.Statistics), CsvData (org.jumpmind.symmetric.io.data.CsvData)
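A hypothetical call to this helper; TableCsvData is an inner test class not shown here, so its constructor shape is an assumption, as is using the CsvData(DataEventType, String[]) constructor for the row:

Table table = new Table("test_table");
Column id = new Column("id");
id.setPrimaryKey(true);
table.addColumn(id);
table.addColumn(new Column("text"));
CsvData row = new CsvData(DataEventType.INSERT, new String[] { "1", "hello" });
// someWriter is whatever IDataWriter implementation the subclass is testing
long statementCount = writeData(someWriter, new TableCsvData(table, row));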

Example 10 with Batch

Use of org.jumpmind.symmetric.io.data.Batch in project symmetric-ds by JumpMind.

From the class SimpleStagingDataWriter, the method process:

public void process() throws IOException {
    String catalogLine = null, schemaLine = null, nodeLine = null, binaryLine = null, channelLine = null;
    TableLine tableLine = null;
    Map<TableLine, TableLine> syncTableLines = new HashMap<TableLine, TableLine>();
    Map<TableLine, TableLine> batchTableLines = new HashMap<TableLine, TableLine>();
    IStagedResource resource = null;
    String line = null;
    long startTime = System.currentTimeMillis(), ts = startTime, lineCount = 0;
    while (reader.readRecord()) {
        line = reader.getRawRecord();
        if (line.startsWith(CsvConstants.CATALOG)) {
            catalogLine = line;
            writeLine(line);
        } else if (line.startsWith(CsvConstants.SCHEMA)) {
            schemaLine = line;
            writeLine(line);
        } else if (line.startsWith(CsvConstants.TABLE)) {
            tableLine = new TableLine(catalogLine, schemaLine, line);
            TableLine batchTableLine = batchTableLines.get(tableLine);
            if (batchTableLine != null) {
                tableLine = batchTableLine;
                writeLine(line);
            } else {
                TableLine syncTableLine = syncTableLines.get(tableLine);
                if (syncTableLine != null) {
                    tableLine = syncTableLine;
                    writeLine(tableLine.catalogLine);
                    writeLine(tableLine.schemaLine);
                    writeLine(line);
                    writeLine(tableLine.keysLine);
                    writeLine(tableLine.columnsLine);
                } else {
                    syncTableLines.put(tableLine, tableLine);
                    batchTableLines.put(tableLine, tableLine);
                    writeLine(line);
                }
            }
        } else if (line.startsWith(CsvConstants.KEYS)) {
            tableLine.keysLine = line;
            writeLine(line);
        } else if (line.startsWith(CsvConstants.COLUMNS)) {
            tableLine.columnsLine = line;
            writeLine(line);
        } else if (line.startsWith(CsvConstants.BATCH)) {
            batch = new Batch(batchType, Long.parseLong(getArgLine(line)), getArgLine(channelLine), getBinaryEncoding(binaryLine), getArgLine(nodeLine), targetNodeId, false);
            String location = batch.getStagedLocation();
            resource = stagingManager.find(category, location, batch.getBatchId());
            if (resource == null || resource.getState() == State.DONE) {
                log.debug("Creating staged resource for batch {}", batch.getNodeBatchId());
                resource = stagingManager.create(category, location, batch.getBatchId());
            }
            writer = resource.getWriter(memoryThresholdInBytes);
            writeLine(nodeLine);
            writeLine(binaryLine);
            writeLine(channelLine);
            writeLine(line);
            if (listeners != null) {
                for (IProtocolDataWriterListener listener : listeners) {
                    listener.start(context, batch);
                }
            }
        } else if (line.startsWith(CsvConstants.COMMIT)) {
            if (writer != null) {
                writeLine(line);
                resource.close();
                resource.setState(State.DONE);
                writer = null;
            }
            batchTableLines.clear();
            if (listeners != null) {
                for (IProtocolDataWriterListener listener : listeners) {
                    listener.end(context, batch, resource);
                }
            }
        } else if (line.startsWith(CsvConstants.RETRY)) {
            batch = new Batch(batchType, Long.parseLong(getArgLine(line)), getArgLine(channelLine), getBinaryEncoding(binaryLine), getArgLine(nodeLine), targetNodeId, false);
            String location = batch.getStagedLocation();
            resource = stagingManager.find(category, location, batch.getBatchId());
            if (resource == null || resource.getState() == State.CREATE) {
                resource = null;
                writer = null;
            }
            if (listeners != null) {
                for (IProtocolDataWriterListener listener : listeners) {
                    listener.start(context, batch);
                }
            }
        } else if (line.startsWith(CsvConstants.NODEID)) {
            nodeLine = line;
        } else if (line.startsWith(CsvConstants.BINARY)) {
            binaryLine = line;
        } else if (line.startsWith(CsvConstants.CHANNEL)) {
            channelLine = line;
        } else {
            if (writer == null) {
                throw new IllegalStateException("Invalid batch data was received: " + line);
            }
            TableLine batchLine = batchTableLines.get(tableLine);
            if (batchLine == null || batchLine.columnsLine == null) {
                TableLine syncLine = syncTableLines.get(tableLine);
                if (syncLine != null) {
                    log.debug("Injecting keys and columns to be backwards compatible");
                    if (batchLine == null) {
                        batchLine = syncLine;
                        batchTableLines.put(batchLine, batchLine);
                        writeLine(batchLine.tableLine);
                    }
                    batchLine.keysLine = syncLine.keysLine;
                    writeLine(syncLine.keysLine);
                    batchLine.columnsLine = syncLine.columnsLine;
                    writeLine(syncLine.columnsLine);
                }
            }
            int size = line.length();
            if (size > MAX_WRITE_LENGTH) {
                log.debug("Exceeded max line length with {}", size);
                for (int i = 0; i < size; i = i + MAX_WRITE_LENGTH) {
                    int end = i + MAX_WRITE_LENGTH;
                    writer.append(line, i, end < size ? end : size);
                }
                writer.append("\n");
            } else {
                writeLine(line);
            }
        }
        lineCount++;
        if (System.currentTimeMillis() - ts > 60000) {
            log.info("Batch '{}', for node '{}', for process 'transfer to stage' has been processing for {} seconds.  The following stats have been gathered: {}", new Object[] { (batch != null ? batch.getBatchId() : 0), (batch != null ? batch.getTargetNodeId() : ""), (System.currentTimeMillis() - startTime) / 1000, "LINES=" + lineCount + ", BYTES=" + ((resource == null) ? 0 : resource.getSize()) });
            ts = System.currentTimeMillis();
        }
    }
}
Also used: HashMap (java.util.HashMap), Batch (org.jumpmind.symmetric.io.data.Batch), IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource)
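The chunked write near the end is a small pattern worth restating on its own: rows longer than MAX_WRITE_LENGTH are appended in fixed-size slices so a single oversized row never goes through one giant append call (same variables as above):

for (int i = 0; i < size; i += MAX_WRITE_LENGTH) {
    int end = Math.min(i + MAX_WRITE_LENGTH, size); // at most one slice per call
    writer.append(line, i, end);
}
writer.append("\n");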

Aggregations

Batch (org.jumpmind.symmetric.io.data.Batch): 16 usages
Table (org.jumpmind.db.model.Table): 9 usages
CsvData (org.jumpmind.symmetric.io.data.CsvData): 8 usages
DataContext (org.jumpmind.symmetric.io.data.DataContext): 8 usages
Column (org.jumpmind.db.model.Column): 4 usages
IOException (java.io.IOException): 3 usages
IoException (org.jumpmind.exception.IoException): 3 usages
OutgoingBatch (org.jumpmind.symmetric.model.OutgoingBatch): 3 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 2 usages
IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 2 usages
Node (org.jumpmind.symmetric.model.Node): 2 usages
Statistics (org.jumpmind.util.Statistics): 2 usages
Test (org.junit.Test): 2 usages
XmlPullParserException (org.xmlpull.v1.XmlPullParserException): 2 usages
OutputStreamWriter (java.io.OutputStreamWriter): 1 usage
ArrayList (java.util.ArrayList): 1 usage
HashMap (java.util.HashMap): 1 usage
ExecutorService (java.util.concurrent.ExecutorService): 1 usage
Element (org.jdom.Element): 1 usage