Example 6 with TriggerHistory

Use of org.jumpmind.symmetric.model.TriggerHistory in project symmetric-ds by JumpMind.

The class SnapshotUtil, method createSnapshot:

public static File createSnapshot(ISymmetricEngine engine) {
    String dirName = engine.getEngineName().replaceAll(" ", "-") + "-" + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
    IParameterService parameterService = engine.getParameterService();
    File tmpDir = new File(parameterService.getTempDirectory(), dirName);
    tmpDir.mkdirs();
    File logDir = null;
    String parameterizedLogDir = parameterService.getString("server.log.dir");
    if (isNotBlank(parameterizedLogDir)) {
        logDir = new File(parameterizedLogDir);
    }
    if (logDir != null && logDir.exists()) {
        log.info("Using server.log.dir setting as the location of the log files");
    } else {
        logDir = new File("logs");
        if (!logDir.exists()) {
            Map<File, Layout> matches = findSymmetricLogFile();
            if (matches != null && matches.size() == 1) {
                logDir = matches.keySet().iterator().next().getParentFile();
            }
        }
        if (!logDir.exists()) {
            logDir = new File("../logs");
        }
        if (!logDir.exists()) {
            logDir = new File("target");
        }
        if (logDir.exists()) {
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".log")) {
                        try {
                            FileUtils.copyFileToDirectory(file, tmpDir);
                        } catch (IOException e) {
                            log.warn("Failed to copy " + file.getName() + " to the snapshot directory", e);
                        }
                    }
                }
            }
        }
    }
    FileWriter fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "config-export.csv"));
        engine.getDataExtractorService().extractConfigurationStandalone(engine.getNodeService().findIdentity(), fwriter, TableConstants.SYM_NODE, TableConstants.SYM_NODE_SECURITY, TableConstants.SYM_NODE_IDENTITY, TableConstants.SYM_NODE_HOST, TableConstants.SYM_NODE_CHANNEL_CTL, TableConstants.SYM_CONSOLE_USER, TableConstants.SYM_MONITOR_EVENT, TableConstants.SYM_CONSOLE_EVENT);
    } catch (Exception e) {
        log.warn("Failed to export symmetric configuration", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }
    File serviceConfFile = new File("conf/sym_service.conf");
    try {
        if (serviceConfFile.exists()) {
            FileUtils.copyFileToDirectory(serviceConfFile, tmpDir);
        }
    } catch (Exception e) {
        log.warn("Failed to copy " + serviceConfFile.getName() + " to the snapshot directory", e);
    }
    TreeSet<Table> tables = new TreeSet<Table>();
    FileOutputStream fos = null;
    try {
        ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
        List<TriggerHistory> triggerHistories = triggerRouterService.getActiveTriggerHistories();
        for (TriggerHistory triggerHistory : triggerHistories) {
            Table table = engine.getDatabasePlatform().getTableFromCache(triggerHistory.getSourceCatalogName(), triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
            if (table != null && !table.getName().toUpperCase().startsWith(engine.getSymmetricDialect().getTablePrefix().toUpperCase())) {
                tables.add(table);
            }
        }
        List<Trigger> triggers = triggerRouterService.getTriggers();
        for (Trigger trigger : triggers) {
            Table table = engine.getDatabasePlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
            if (table != null) {
                tables.add(table);
            }
        }
        fos = new FileOutputStream(new File(tmpDir, "table-definitions.xml"));
        DbExport export = new DbExport(engine.getDatabasePlatform());
        export.setFormat(Format.XML);
        export.setNoData(true);
        export.exportTables(fos, tables.toArray(new Table[tables.size()]));
    } catch (Exception e) {
        log.warn("Failed to export table definitions", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    String tablePrefix = engine.getTablePrefix();
    DbExport export = new DbExport(engine.getDatabasePlatform());
    export.setFormat(Format.CSV);
    export.setNoCreateInfo(true);
    extract(export, new File(tmpDir, "sym_identity.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_IDENTITY));
    extract(export, new File(tmpDir, "sym_node.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE));
    extract(export, new File(tmpDir, "sym_node_security.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_SECURITY));
    extract(export, new File(tmpDir, "sym_node_host.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_HOST));
    extract(export, new File(tmpDir, "sym_trigger_hist.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_TRIGGER_HIST));
    try {
        if (!parameterService.is(ParameterConstants.CLUSTER_LOCKING_ENABLED)) {
            engine.getNodeCommunicationService().persistToTableForSnapshot();
            engine.getClusterService().persistToTableForSnapshot();
        }
    } catch (Exception e) {
        log.warn("Unable to add SYM_NODE_COMMUNICATION to the snapshot.", e);
    }
    extract(export, new File(tmpDir, "sym_lock.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_LOCK));
    extract(export, new File(tmpDir, "sym_node_communication.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_COMMUNICATION));
    extract(export, 10000, "order by create_time desc", new File(tmpDir, "sym_outgoing_batch.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));
    extract(export, 10000, "where status != 'OK' order by create_time", new File(tmpDir, "sym_outgoing_batch_not_ok.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));
    extract(export, 10000, "order by create_time desc", new File(tmpDir, "sym_incoming_batch.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));
    extract(export, 10000, "where status != 'OK' order by create_time", new File(tmpDir, "sym_incoming_batch_not_ok.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));
    extract(export, 5000, "order by start_id, end_id desc", new File(tmpDir, "sym_data_gap.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_DATA_GAP));
    extract(export, new File(tmpDir, "sym_table_reload_request.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_TABLE_RELOAD_REQUEST));
    extract(export, 5000, "order by relative_dir, file_name", new File(tmpDir, "sym_file_snapshot.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_FILE_SNAPSHOT));
    extract(export, new File(tmpDir, "sym_console_event.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_CONSOLE_EVENT));
    extract(export, new File(tmpDir, "sym_monitor_event.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_MONITOR_EVENT));
    extract(export, new File(tmpDir, "sym_extract_request.csv"), TableConstants.getTableName(tablePrefix, TableConstants.SYM_EXTRACT_REQUEST));
    if (engine.getSymmetricDialect() instanceof FirebirdSymmetricDialect) {
        final String[] monTables = { "mon$database", "mon$attachments", "mon$transactions", "mon$statements", "mon$io_stats", "mon$record_stats", "mon$memory_usage", "mon$call_stack", "mon$context_variables" };
        for (String table : monTables) {
            extract(export, new File(tmpDir, "firebird-" + table + ".csv"), table);
        }
    }
    fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "threads.txt"));
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        long[] threadIds = threadBean.getAllThreadIds();
        for (long l : threadIds) {
            ThreadInfo info = threadBean.getThreadInfo(l, 100);
            if (info != null) {
                String threadName = info.getThreadName();
                fwriter.append(StringUtils.rightPad(threadName, THREAD_INDENT_SPACE));
                fwriter.append(AppUtils.formatStackTrace(info.getStackTrace(), THREAD_INDENT_SPACE, false));
                fwriter.append("\n");
            }
        }
    } catch (Exception e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters.properties"));
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        SortedProperties parameters = new SortedProperties();
        parameters.putAll(effectiveParameters);
        parameters.remove("db.password");
        parameters.store(fos, "parameters.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameter information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters-changed.properties"));
        Properties defaultParameters = new Properties();
        InputStream in = SnapshotUtil.class.getResourceAsStream("/symmetric-default.properties");
        defaultParameters.load(in);
        IOUtils.closeQuietly(in);
        in = SnapshotUtil.class.getResourceAsStream("/symmetric-console-default.properties");
        if (in != null) {
            defaultParameters.load(in);
            IOUtils.closeQuietly(in);
        }
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        Properties changedParameters = new SortedProperties();
        Map<String, ParameterMetaData> parameters = ParameterConstants.getParameterMetaData();
        for (String key : parameters.keySet()) {
            String defaultValue = defaultParameters.getProperty(key);
            String currentValue = effectiveParameters.getProperty(key);
            if ((defaultValue == null && currentValue != null) || (defaultValue != null && !defaultValue.equals(currentValue))) {
                changedParameters.put(key, currentValue == null ? "" : currentValue);
            }
        }
        changedParameters.remove("db.password");
        changedParameters.store(fos, "parameters-changed.properties");
    } catch (Exception e) {
        log.warn("Failed to export parameters-changed information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    writeRuntimeStats(engine, tmpDir);
    writeJobsStats(engine, tmpDir);
    if ("true".equals(System.getProperty(SystemConstants.SYSPROP_STANDALONE_WEB))) {
        writeDirectoryListing(engine, tmpDir);
    }
    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "system.properties"));
        SortedProperties props = new SortedProperties();
        props.putAll(System.getProperties());
        props.store(fos, "system.properties");
    } catch (Exception e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }
    try {
        File jarFile = new File(getSnapshotDirectory(engine), tmpDir.getName() + ".zip");
        JarBuilder builder = new JarBuilder(tmpDir, jarFile, new File[] { tmpDir }, Version.version());
        builder.build();
        FileUtils.deleteDirectory(tmpDir);
        return jarFile;
    } catch (Exception e) {
        throw new IoException("Failed to package snapshot files into archive", e);
    }
}
Also used : FileWriter(java.io.FileWriter) JarBuilder(org.jumpmind.util.JarBuilder) IParameterService(org.jumpmind.symmetric.service.IParameterService) Properties(java.util.Properties) Trigger(org.jumpmind.symmetric.model.Trigger) ThreadInfo(java.lang.management.ThreadInfo) TreeSet(java.util.TreeSet) TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory) ThreadMXBean(java.lang.management.ThreadMXBean) Table(org.jumpmind.db.model.Table) FirebirdSymmetricDialect(org.jumpmind.symmetric.db.firebird.FirebirdSymmetricDialect) ITriggerRouterService(org.jumpmind.symmetric.service.ITriggerRouterService) InputStream(java.io.InputStream) IOException(java.io.IOException) Date(java.util.Date) IoException(org.jumpmind.exception.IoException) IOException(java.io.IOException) DbExport(org.jumpmind.symmetric.io.data.DbExport) Layout(org.apache.log4j.Layout) FileOutputStream(java.io.FileOutputStream) IoException(org.jumpmind.exception.IoException) SimpleDateFormat(java.text.SimpleDateFormat) File(java.io.File) ParameterMetaData(org.jumpmind.properties.DefaultParameterParser.ParameterMetaData)
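
A minimal caller sketch (not from the project): it assumes a started ISymmetricEngine is in hand and that SnapshotUtil sits in org.jumpmind.symmetric.util as in the project layout; the method returns the zipped snapshot archive it builds.

import java.io.File;

import org.jumpmind.symmetric.ISymmetricEngine;
import org.jumpmind.symmetric.util.SnapshotUtil;

public class SnapshotExample {

    // Hypothetical helper: "engine" must be a started ISymmetricEngine.
    public static File takeSupportSnapshot(ISymmetricEngine engine) {
        // createSnapshot gathers logs, the configuration export, table definitions and
        // runtime statistics into a temp directory and zips them into one archive.
        return SnapshotUtil.createSnapshot(engine);
    }
}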

Example 7 with TriggerHistory

Use of org.jumpmind.symmetric.model.TriggerHistory in project symmetric-ds by JumpMind.

The class DataService, method insertReloadEvents:

public void insertReloadEvents(Node targetNode, boolean reverse, List<TableReloadRequest> reloadRequests, ProcessInfo processInfo) {
    if (engine.getClusterService().lock(ClusterConstants.SYNCTRIGGERS)) {
        try {
            synchronized (engine.getTriggerRouterService()) {
                engine.getClusterService().lock(ClusterConstants.SYNCTRIGGERS);
                boolean isFullLoad = reloadRequests == null || (reloadRequests.size() == 1 && reloadRequests.get(0).isFullLoadRequest());
                if (!reverse) {
                    log.info("Queueing up " + (isFullLoad ? "an initial" : "a") + " load to node " + targetNode.getNodeId());
                } else {
                    log.info("Queueing up a reverse " + (isFullLoad ? "initial" : "") + " load to node " + targetNode.getNodeId());
                }
                /*
                 * Outgoing data events are pointless because we are
                 * reloading all data
                 */
                if (isFullLoad) {
                    engine.getOutgoingBatchService().markAllAsSentForNode(targetNode.getNodeId(), false);
                }
                INodeService nodeService = engine.getNodeService();
                ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
                Node sourceNode = nodeService.findIdentity();
                boolean transactional = parameterService.is(ParameterConstants.DATA_RELOAD_IS_BATCH_INSERT_TRANSACTIONAL);
                String nodeIdRecord = reverse ? nodeService.findIdentityNodeId() : targetNode.getNodeId();
                NodeSecurity nodeSecurity = nodeService.findNodeSecurity(nodeIdRecord);
                ISqlTransaction transaction = null;
                try {
                    transaction = platform.getSqlTemplate().startSqlTransaction();
                    long loadId = engine.getSequenceService().nextVal(transaction, Constants.SEQUENCE_OUTGOING_BATCH_LOAD_ID);
                    processInfo.setCurrentLoadId(loadId);
                    String createBy = reverse ? nodeSecurity.getRevInitialLoadCreateBy() : nodeSecurity.getInitialLoadCreateBy();
                    List<TriggerHistory> triggerHistories = new ArrayList<TriggerHistory>();
                    if (isFullLoad) {
                        triggerHistories = triggerRouterService.getActiveTriggerHistories();
                    } else {
                        for (TableReloadRequest reloadRequest : reloadRequests) {
                            triggerHistories.addAll(engine.getTriggerRouterService().getActiveTriggerHistories(new Trigger(reloadRequest.getTriggerId(), null)));
                        }
                    }
                    processInfo.setDataCount(triggerHistories.size());
                    Map<Integer, List<TriggerRouter>> triggerRoutersByHistoryId = triggerRouterService.fillTriggerRoutersByHistIdAndSortHist(sourceNode.getNodeGroupId(), targetNode.getNodeGroupId(), triggerHistories);
                    if (isFullLoad) {
                        callReloadListeners(true, targetNode, transactional, transaction, loadId);
                        insertCreateSchemaScriptPriorToReload(targetNode, nodeIdRecord, loadId, createBy, transactional, transaction);
                    }
                    Map<String, TableReloadRequest> mapReloadRequests = convertReloadListToMap(reloadRequests);
                    String symNodeSecurityReloadChannel = null;
                    try {
                        symNodeSecurityReloadChannel = triggerRoutersByHistoryId.get(triggerHistories.get(0).getTriggerHistoryId()).get(0).getTrigger().getReloadChannelId();
                    } catch (Exception e) {
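                        // Best effort: leave the reload channel null when no trigger router is mapped to the first trigger history.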
                    }
                    if (isFullLoad || (reloadRequests != null && reloadRequests.size() > 0)) {
                        insertSqlEventsPriorToReload(targetNode, nodeIdRecord, loadId, createBy, transactional, transaction, reverse, triggerHistories, triggerRoutersByHistoryId, mapReloadRequests, isFullLoad, symNodeSecurityReloadChannel);
                    }
                    insertCreateBatchesForReload(targetNode, loadId, createBy, triggerHistories, triggerRoutersByHistoryId, transactional, transaction, mapReloadRequests);
                    insertDeleteBatchesForReload(targetNode, loadId, createBy, triggerHistories, triggerRoutersByHistoryId, transactional, transaction, mapReloadRequests);
                    insertSQLBatchesForReload(targetNode, loadId, createBy, triggerHistories, triggerRoutersByHistoryId, transactional, transaction, mapReloadRequests);
                    insertLoadBatchesForReload(targetNode, loadId, createBy, triggerHistories, triggerRoutersByHistoryId, transactional, transaction, mapReloadRequests, processInfo);
                    if (isFullLoad) {
                        String afterSql = parameterService.getString(reverse ? ParameterConstants.INITIAL_LOAD_REVERSE_AFTER_SQL : ParameterConstants.INITIAL_LOAD_AFTER_SQL);
                        if (isNotBlank(afterSql)) {
                            insertSqlEvent(transaction, targetNode, afterSql, true, loadId, createBy);
                        }
                    }
                    insertFileSyncBatchForReload(targetNode, loadId, createBy, transactional, transaction, processInfo);
                    if (isFullLoad) {
                        callReloadListeners(false, targetNode, transactional, transaction, loadId);
                        if (!reverse) {
                            nodeService.setInitialLoadEnabled(transaction, nodeIdRecord, false, false, loadId, createBy);
                        } else {
                            nodeService.setReverseInitialLoadEnabled(transaction, nodeIdRecord, false, false, loadId, createBy);
                        }
                    }
                    if (!Constants.DEPLOYMENT_TYPE_REST.equals(targetNode.getDeploymentType())) {
                        insertNodeSecurityUpdate(transaction, nodeIdRecord, targetNode.getNodeId(), true, loadId, createBy, symNodeSecurityReloadChannel);
                    }
                    engine.getStatisticManager().incrementNodesLoaded(1);
                    if (reloadRequests != null && reloadRequests.size() > 0) {
                        for (TableReloadRequest request : reloadRequests) {
                            transaction.prepareAndExecute(getSql("updateProcessedTableReloadRequest"), loadId, new Date(), request.getTargetNodeId(), request.getSourceNodeId(), request.getTriggerId(), request.getRouterId(), request.getCreateTime());
                        }
                        log.info("Table reload request(s) for load id " + loadId + " have been processed.");
                    }
                    transaction.commit();
                } catch (Error ex) {
                    if (transaction != null) {
                        transaction.rollback();
                    }
                    throw ex;
                } catch (RuntimeException ex) {
                    if (transaction != null) {
                        transaction.rollback();
                    }
                    throw ex;
                } finally {
                    close(transaction);
                }
                if (!reverse) {
                    /*
                     * Remove all incoming events for the node that we are
                     * starting a reload for
                     */
                    engine.getPurgeService().purgeAllIncomingEventsForNode(targetNode.getNodeId());
                }
            }
        } finally {
            engine.getClusterService().unlock(ClusterConstants.SYNCTRIGGERS);
        }
    } else {
        log.info("Not attempting to insert reload events because sync trigger is currently running");
    }
}
Also used : NodeSecurity(org.jumpmind.symmetric.model.NodeSecurity) ITriggerRouterService(org.jumpmind.symmetric.service.ITriggerRouterService) Node(org.jumpmind.symmetric.model.Node) ArrayList(java.util.ArrayList) UniqueKeyException(org.jumpmind.db.sql.UniqueKeyException) NotImplementedException(org.apache.commons.lang.NotImplementedException) SymmetricException(org.jumpmind.symmetric.SymmetricException) IoException(org.jumpmind.exception.IoException) Date(java.util.Date) ISqlTransaction(org.jumpmind.db.sql.ISqlTransaction) Trigger(org.jumpmind.symmetric.model.Trigger) TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory) INodeService(org.jumpmind.symmetric.service.INodeService) TableReloadRequest(org.jumpmind.symmetric.model.TableReloadRequest) List(java.util.List) ArrayList(java.util.ArrayList)
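
A hedged sketch of how a full initial load might be queued with this method, assuming the same signature is exposed on IDataService via engine.getDataService(); the "002" node id and the ProcessInfo instance are placeholders, and a null request list takes the full-load branch shown above.

import org.jumpmind.symmetric.ISymmetricEngine;
import org.jumpmind.symmetric.model.Node;
import org.jumpmind.symmetric.model.ProcessInfo;

public class FullLoadExample {

    // Hypothetical helper: queue a full initial load for one target node.
    public static void queueFullLoad(ISymmetricEngine engine, ProcessInfo processInfo) {
        Node target = engine.getNodeService().findNode("002");
        // A null reloadRequests list means "full load" in insertReloadEvents above.
        engine.getDataService().insertReloadEvents(target, false, null, processInfo);
    }
}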

Example 8 with TriggerHistory

Use of org.jumpmind.symmetric.model.TriggerHistory in project symmetric-ds by JumpMind.

The class DataService, method insertSqlEvent:

public void insertSqlEvent(ISqlTransaction transaction, Node targetNode, String sql, boolean isLoad, long loadId, String createBy) {
    TriggerHistory history = engine.getTriggerRouterService().findTriggerHistoryForGenericSync();
    insertSqlEvent(transaction, history, Constants.CHANNEL_CONFIG, targetNode, sql, isLoad, loadId, createBy);
}
Also used : TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory)
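
A hedged usage sketch: push a one-off SQL statement to a target node, letting the wrapper above resolve the generic-sync trigger history and the config channel. It assumes insertSqlEvent is reachable through engine.getDataService(); the transaction handling mirrors the pattern used elsewhere in DataService, and -1/null mark the event as not being part of an initial load.

import org.jumpmind.db.sql.ISqlTransaction;
import org.jumpmind.symmetric.ISymmetricEngine;
import org.jumpmind.symmetric.model.Node;

public class AdHocSqlEventExample {

    // Hypothetical helper: send an ad hoc SQL statement to one target node.
    public static void sendSql(ISymmetricEngine engine, Node targetNode, String sql) {
        ISqlTransaction transaction = null;
        try {
            transaction = engine.getDatabasePlatform().getSqlTemplate().startSqlTransaction();
            engine.getDataService().insertSqlEvent(transaction, targetNode, sql, false, -1, null);
            transaction.commit();
        } catch (RuntimeException ex) {
            if (transaction != null) {
                transaction.rollback();
            }
            throw ex;
        } finally {
            if (transaction != null) {
                transaction.close();
            }
        }
    }
}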

Example 9 with TriggerHistory

Use of org.jumpmind.symmetric.model.TriggerHistory in project symmetric-ds by JumpMind.

The class DataService, method insertReloadEvent:

public boolean insertReloadEvent(TableReloadRequest request, boolean deleteAtClient) {
    boolean successful = false;
    if (request != null) {
        ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
        INodeService nodeService = engine.getNodeService();
        Node targetNode = nodeService.findNode(request.getTargetNodeId());
        if (targetNode != null) {
            TriggerRouter triggerRouter = triggerRouterService.getTriggerRouterForCurrentNode(request.getTriggerId(), request.getRouterId(), false);
            if (triggerRouter != null) {
                Trigger trigger = triggerRouter.getTrigger();
                Router router = triggerRouter.getRouter();
                NodeGroupLink link = router.getNodeGroupLink();
                Node me = nodeService.findIdentity();
                if (link.getSourceNodeGroupId().equals(me.getNodeGroupId())) {
                    if (link.getTargetNodeGroupId().equals(targetNode.getNodeGroupId())) {
                        TriggerHistory triggerHistory = lookupTriggerHistory(trigger);
                        ISqlTransaction transaction = null;
                        try {
                            transaction = sqlTemplate.startSqlTransaction();
                            if (parameterService.is(ParameterConstants.INITIAL_LOAD_DELETE_BEFORE_RELOAD)) {
                                String overrideDeleteStatement = StringUtils.isNotBlank(request.getBeforeCustomSql()) ? request.getBeforeCustomSql() : null;
                                insertPurgeEvent(transaction, targetNode, triggerRouter, triggerHistory, false, overrideDeleteStatement, -1, null);
                            }
                            insertReloadEvent(transaction, targetNode, triggerRouter, triggerHistory, request.getReloadSelect(), false, -1, null, Status.NE);
                            if (!targetNode.requires13Compatiblity() && deleteAtClient) {
                                insertSqlEvent(transaction, triggerHistory, trigger.getChannelId(), targetNode, String.format("delete from %s where target_node_id='%s' and source_node_id='%s' and trigger_id='%s' and router_id='%s'", TableConstants.getTableName(tablePrefix, TableConstants.SYM_TABLE_RELOAD_REQUEST), request.getTargetNodeId(), request.getSourceNodeId(), request.getTriggerId(), request.getRouterId()), false, -1, null);
                            }
                            deleteTableReloadRequest(transaction, request);
                            transaction.commit();
                        } catch (Error ex) {
                            if (transaction != null) {
                                transaction.rollback();
                            }
                            throw ex;
                        } catch (RuntimeException ex) {
                            if (transaction != null) {
                                transaction.rollback();
                            }
                            throw ex;
                        } finally {
                            close(transaction);
                        }
                    } else {
                        log.error("Could not reload table {} for node {} because the router {} target node group id {} did not match", new Object[] { trigger.getSourceTableName(), request.getTargetNodeId(), request.getRouterId(), link.getTargetNodeGroupId() });
                    }
                } else {
                    log.error("Could not reload table {}  for node {} because the router {} source node group id {} did not match", new Object[] { trigger.getSourceTableName(), request.getTargetNodeId(), request.getRouterId(), link.getSourceNodeGroupId() });
                }
            } else {
                log.error("Could not reload table for node {} because the trigger router ({}, {}) could not be found", new Object[] { request.getTargetNodeId(), request.getTriggerId(), request.getRouterId() });
            }
        } else {
            log.error("Could not reload table for node {} because the target node could not be found", request.getTargetNodeId());
        }
    }
    return successful;
}
Also used : ITriggerRouterService(org.jumpmind.symmetric.service.ITriggerRouterService) Node(org.jumpmind.symmetric.model.Node) TriggerRouter(org.jumpmind.symmetric.model.TriggerRouter) Router(org.jumpmind.symmetric.model.Router) ISqlTransaction(org.jumpmind.db.sql.ISqlTransaction) Trigger(org.jumpmind.symmetric.model.Trigger) TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory) INodeService(org.jumpmind.symmetric.service.INodeService) TriggerRouter(org.jumpmind.symmetric.model.TriggerRouter) TransformTableNodeGroupLink(org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink) NodeGroupLink(org.jumpmind.symmetric.model.NodeGroupLink)
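
A sketch of building a reload request programmatically. The node, trigger and router ids are placeholders, the no-arg constructor and bean setters on TableReloadRequest are assumed to mirror the getters used above, and the call is assumed to be reachable through engine.getDataService().

import org.jumpmind.symmetric.ISymmetricEngine;
import org.jumpmind.symmetric.model.TableReloadRequest;

public class ReloadRequestExample {

    // Hypothetical helper: queue a reload of a single table for one target node.
    public static boolean requestTableReload(ISymmetricEngine engine) {
        TableReloadRequest request = new TableReloadRequest();
        request.setTargetNodeId("002");
        request.setSourceNodeId("001");
        request.setTriggerId("sale_transaction");
        request.setRouterId("corp_2_store");
        // true = also queue a delete of the matching sym_table_reload_request row
        // at the target (the deleteAtClient flag handled above).
        return engine.getDataService().insertReloadEvent(request, true);
    }
}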

Example 10 with TriggerHistory

Use of org.jumpmind.symmetric.model.TriggerHistory in project symmetric-ds by JumpMind.

The class DataExtractorService, method extractConfigurationStandalone:

/**
 * Extract the SymmetricDS configuration for the passed in {@link Node}.
 */
public void extractConfigurationStandalone(Node targetNode, Writer writer, String... tablesToExclude) {
    Node sourceNode = nodeService.findIdentity();
    if (targetNode != null && sourceNode != null) {
        Batch batch = new Batch(BatchType.EXTRACT, Constants.VIRTUAL_BATCH_FOR_REGISTRATION, Constants.CHANNEL_CONFIG, symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), targetNode.getNodeId(), false);
        NodeGroupLink nodeGroupLink = new NodeGroupLink(parameterService.getNodeGroupId(), targetNode.getNodeGroupId());
        List<TriggerRouter> triggerRouters = triggerRouterService.buildTriggerRoutersForSymmetricTables(StringUtils.isBlank(targetNode.getSymmetricVersion()) ? Version.version() : targetNode.getSymmetricVersion(), nodeGroupLink, tablesToExclude);
        List<SelectFromTableEvent> initialLoadEvents = new ArrayList<SelectFromTableEvent>(triggerRouters.size() * 2);
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, triggerRouter.getTrigger().getSourceTableName());
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        Table table = symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
                        if (table == null) {
                            throw new IllegalStateException("Could not find a required table: " + triggerRouter.getTrigger().getSourceTableName());
                        }
                        triggerHistory = new TriggerHistory(table, triggerRouter.getTrigger(), symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    StringBuilder sql = new StringBuilder(symmetricDialect.createPurgeSqlFor(targetNode, triggerRouter, triggerHistory));
                    addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
                    String sourceTable = triggerHistory.getSourceTableName();
                    Data data = new Data(1, null, sql.toString(), DataEventType.SQL, sourceTable, null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                    data.putAttribute(Data.ATTRIBUTE_ROUTER_ID, triggerRouter.getRouter().getRouterId());
                    initialLoadEvents.add(new SelectFromTableEvent(data));
                }
            }
        }
        for (int i = 0; i < triggerRouters.size(); i++) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, null);
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        triggerHistory = new TriggerHistory(symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false), trigger, symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    Table table = symmetricDialect.getPlatform().getTableFromCache(triggerHistory.getSourceCatalogName(), triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
                    String initialLoadSql = "1=1 order by ";
                    String quote = symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken();
                    Column[] pkColumns = table.getPrimaryKeyColumns();
                    for (int j = 0; j < pkColumns.length; j++) {
                        if (j > 0) {
                            initialLoadSql += ", ";
                        }
                        initialLoadSql += quote + pkColumns[j].getName() + quote;
                    }
                    if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
                        initialLoadEvents.add(new SelectFromTableEvent(targetNode, triggerRouter, triggerHistory, initialLoadSql));
                    } else {
                        Data data = new Data(1, null, targetNode.getNodeId(), DataEventType.INSERT, triggerHistory.getSourceTableName(), null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                        initialLoadEvents.add(new SelectFromTableEvent(data));
                    }
                }
            }
        }
        SelectFromTableSource source = new SelectFromTableSource(batch, initialLoadEvents);
        ExtractDataReader dataReader = new ExtractDataReader(this.symmetricDialect.getPlatform(), source);
        ProtocolDataWriter dataWriter = new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity());
        DataProcessor processor = new DataProcessor(dataReader, dataWriter, "configuration extract");
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        processor.process(ctx);
        if (triggerRouters.size() == 0) {
            log.error("{} attempted registration, but was sent an empty configuration", targetNode);
        }
    }
}
Also used : TransformTable(org.jumpmind.symmetric.io.data.transform.TransformTable) Table(org.jumpmind.db.model.Table) Node(org.jumpmind.symmetric.model.Node) ArrayList(java.util.ArrayList) Data(org.jumpmind.symmetric.model.Data) DataMetaData(org.jumpmind.symmetric.model.DataMetaData) CsvData(org.jumpmind.symmetric.io.data.CsvData) DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor) TransformPoint(org.jumpmind.symmetric.io.data.transform.TransformPoint) DataContext(org.jumpmind.symmetric.io.data.DataContext) Trigger(org.jumpmind.symmetric.model.Trigger) ProtocolDataWriter(org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter) Batch(org.jumpmind.symmetric.io.data.Batch) OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch) Column(org.jumpmind.db.model.Column) PlatformColumn(org.jumpmind.db.model.PlatformColumn) TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory) TriggerRouter(org.jumpmind.symmetric.model.TriggerRouter) TransformTableNodeGroupLink(org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink) NodeGroupLink(org.jumpmind.symmetric.model.NodeGroupLink) ExtractDataReader(org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
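
For reference, the snapshot code in Example 6 reaches this method through engine.getDataExtractorService(). A minimal standalone sketch writing the configuration batch to an in-memory writer could look like this; the node id is a placeholder and the excluded tables reuse constants already referenced in Example 6.

import java.io.StringWriter;

import org.jumpmind.symmetric.ISymmetricEngine;
import org.jumpmind.symmetric.common.TableConstants;
import org.jumpmind.symmetric.model.Node;

public class ConfigExtractExample {

    // Hypothetical helper: extract the registration/configuration batch for one node.
    public static String extractConfigCsv(ISymmetricEngine engine, String targetNodeId) {
        Node targetNode = engine.getNodeService().findNode(targetNodeId);
        StringWriter writer = new StringWriter();
        engine.getDataExtractorService().extractConfigurationStandalone(targetNode, writer,
                TableConstants.SYM_NODE_HOST, TableConstants.SYM_CONSOLE_EVENT);
        return writer.toString();
    }
}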

Aggregations

TriggerHistory (org.jumpmind.symmetric.model.TriggerHistory): 50 usages
Table (org.jumpmind.db.model.Table): 19 usages
Data (org.jumpmind.symmetric.model.Data): 19 usages
Node (org.jumpmind.symmetric.model.Node): 17 usages
DataMetaData (org.jumpmind.symmetric.model.DataMetaData): 14 usages
Trigger (org.jumpmind.symmetric.model.Trigger): 14 usages
TriggerRouter (org.jumpmind.symmetric.model.TriggerRouter): 13 usages
HashSet (java.util.HashSet): 12 usages
Test (org.junit.Test): 12 usages
ArrayList (java.util.ArrayList): 10 usages
Router (org.jumpmind.symmetric.model.Router): 10 usages
NodeChannel (org.jumpmind.symmetric.model.NodeChannel): 9 usages
ITriggerRouterService (org.jumpmind.symmetric.service.ITriggerRouterService): 8 usages
List (java.util.List): 6 usages
CsvData (org.jumpmind.symmetric.io.data.CsvData): 6 usages
SymmetricException (org.jumpmind.symmetric.SymmetricException): 5 usages
TableReloadRequest (org.jumpmind.symmetric.model.TableReloadRequest): 5 usages
Date (java.util.Date): 4 usages
ISqlTransaction (org.jumpmind.db.sql.ISqlTransaction): 4 usages
HashMap (java.util.HashMap): 3 usages