Example 66 with Column

use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.

the class DataExtractorService method extractConfigurationStandalone.

/**
 * Extract the SymmetricDS configuration for the passed in {@link Node}.
 */
public void extractConfigurationStandalone(Node targetNode, Writer writer, String... tablesToExclude) {
    Node sourceNode = nodeService.findIdentity();
    if (targetNode != null && sourceNode != null) {
        Batch batch = new Batch(BatchType.EXTRACT, Constants.VIRTUAL_BATCH_FOR_REGISTRATION, Constants.CHANNEL_CONFIG, symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), targetNode.getNodeId(), false);
        NodeGroupLink nodeGroupLink = new NodeGroupLink(parameterService.getNodeGroupId(), targetNode.getNodeGroupId());
        List<TriggerRouter> triggerRouters = triggerRouterService.buildTriggerRoutersForSymmetricTables(StringUtils.isBlank(targetNode.getSymmetricVersion()) ? Version.version() : targetNode.getSymmetricVersion(), nodeGroupLink, tablesToExclude);
        List<SelectFromTableEvent> initialLoadEvents = new ArrayList<SelectFromTableEvent>(triggerRouters.size() * 2);
        boolean pre37 = Version.isOlderThanVersion(targetNode.getSymmetricVersion(), "3.7.0");
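        // First pass (iterated in reverse): queue SQL events that purge the target's existing configuration rows before they are reloaded.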
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (!(pre37 && triggerRouter.getTrigger().getSourceTableName().toLowerCase().contains("extension"))) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, triggerRouter.getTrigger().getSourceTableName());
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        Table table = symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
                        if (table == null) {
                            throw new IllegalStateException("Could not find a required table: " + triggerRouter.getTrigger().getSourceTableName());
                        }
                        triggerHistory = new TriggerHistory(table, triggerRouter.getTrigger(), symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    StringBuilder sql = new StringBuilder(symmetricDialect.createPurgeSqlFor(targetNode, triggerRouter, triggerHistory));
                    addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
                    String sourceTable = triggerHistory.getSourceTableName();
                    Data data = new Data(1, null, sql.toString(), DataEventType.SQL, sourceTable, null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                    data.putAttribute(Data.ATTRIBUTE_ROUTER_ID, triggerRouter.getRouter().getRouterId());
                    initialLoadEvents.add(new SelectFromTableEvent(data));
                }
            }
        }
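        // Second pass: queue reload events for each configuration table, ordered by primary key; sym_node_identity is sent as a single INSERT of the target node id instead.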
        for (int i = 0; i < triggerRouters.size(); i++) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (!(pre37 && triggerRouter.getTrigger().getSourceTableName().toLowerCase().contains("extension"))) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger().getTriggerId(), null, null, null);
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        triggerHistory = new TriggerHistory(symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false), trigger, symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    Table table = symmetricDialect.getPlatform().getTableFromCache(triggerHistory.getSourceCatalogName(), triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
                    String initialLoadSql = "1=1 order by ";
                    String quote = symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken();
                    Column[] pkColumns = table.getPrimaryKeyColumns();
                    for (int j = 0; j < pkColumns.length; j++) {
                        if (j > 0) {
                            initialLoadSql += ", ";
                        }
                        initialLoadSql += quote + pkColumns[j].getName() + quote;
                    }
                    if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
                        initialLoadEvents.add(new SelectFromTableEvent(targetNode, triggerRouter, triggerHistory, initialLoadSql));
                    } else {
                        Data data = new Data(1, null, targetNode.getNodeId(), DataEventType.INSERT, triggerHistory.getSourceTableName(), null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                        initialLoadEvents.add(new SelectFromTableEvent(data));
                    }
                }
            }
        }
        SelectFromTableSource source = new SelectFromTableSource(batch, initialLoadEvents);
        ExtractDataReader dataReader = new ExtractDataReader(this.symmetricDialect.getPlatform(), source);
        ProtocolDataWriter dataWriter = new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity());
        DataProcessor processor = new DataProcessor(dataReader, dataWriter, "configuration extract");
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        processor.process(ctx);
        if (triggerRouters.size() == 0) {
            log.error("{} attempted registration, but was sent an empty configuration", targetNode);
        }
    }
}
Also used: TransformTable(org.jumpmind.symmetric.io.data.transform.TransformTable), Table(org.jumpmind.db.model.Table), Node(org.jumpmind.symmetric.model.Node), ArrayList(java.util.ArrayList), Data(org.jumpmind.symmetric.model.Data), DataMetaData(org.jumpmind.symmetric.model.DataMetaData), CsvData(org.jumpmind.symmetric.io.data.CsvData), DataProcessor(org.jumpmind.symmetric.io.data.DataProcessor), TransformPoint(org.jumpmind.symmetric.io.data.transform.TransformPoint), DataContext(org.jumpmind.symmetric.io.data.DataContext), Trigger(org.jumpmind.symmetric.model.Trigger), ProtocolDataWriter(org.jumpmind.symmetric.io.data.writer.ProtocolDataWriter), Batch(org.jumpmind.symmetric.io.data.Batch), OutgoingBatch(org.jumpmind.symmetric.model.OutgoingBatch), Column(org.jumpmind.db.model.Column), PlatformColumn(org.jumpmind.db.model.PlatformColumn), TriggerHistory(org.jumpmind.symmetric.model.TriggerHistory), TriggerRouter(org.jumpmind.symmetric.model.TriggerRouter), TransformTableNodeGroupLink(org.jumpmind.symmetric.service.impl.TransformService.TransformTableNodeGroupLink), NodeGroupLink(org.jumpmind.symmetric.model.NodeGroupLink), ExtractDataReader(org.jumpmind.symmetric.io.data.reader.ExtractDataReader)
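The primary-key ORDER BY clause in the second loop above is built straight from Column metadata and the platform's delimiter token. A minimal standalone sketch of the same pattern, assuming the same Column and quote inputs (the helper name is illustrative, not SymmetricDS API):

import org.jumpmind.db.model.Column;

// Illustrative helper: wrap each primary-key column name in the platform's
// identifier delimiter and join them into the initial-load ORDER BY suffix.
static String orderByPrimaryKeys(Column[] pkColumns, String quote) {
    StringBuilder sql = new StringBuilder("1=1 order by ");
    for (int i = 0; i < pkColumns.length; i++) {
        if (i > 0) {
            sql.append(", ");
        }
        sql.append(quote).append(pkColumns[i].getName()).append(quote);
    }
    return sql.toString();
}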

Example 67 with Column

use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.

the class OracleBulkDatabaseWriter method flush.

protected void flush() {
    statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
    try {
        if (rowArrays.size() > 0) {
            JdbcSqlTransaction jdbcTransaction = (JdbcSqlTransaction) transaction;
            Connection c = jdbcTransaction.getConnection();
            Connection oracleConnection = jdbcExtractor.getNativeConnection(c);
            Column[] columns = targetTable.getColumns();
            StringBuilder questions = new StringBuilder();
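            // One placeholder per column array plus one extra (note the <=) for the error OUT parameter registered below.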
            for (int i = 0; i <= columns.length; i++) {
                questions.append("?, ");
            }
            questions.replace(questions.length() - 2, questions.length(), "");
            String sql = String.format("{ call %s(%s) }", buildProcedureName("i", targetTable), questions);
            OracleCallableStatement stmt = (OracleCallableStatement) oracleConnection.prepareCall(sql);
            for (int i = 0; i < columns.length; i++) {
                Column column = columns[i];
                ArrayDescriptor type = ArrayDescriptor.createDescriptor(getTypeName(column.getMappedTypeCode()), oracleConnection);
                List<Object> columnData = rowArrays.get(i);
                ARRAY array = new ARRAY(type, oracleConnection, columnData.toArray(new Object[columnData.size()]));
                stmt.setObject(i + 1, array);
            }
            int errorIndex = columns.length + 1;
            stmt.registerOutParameter(errorIndex, OracleTypes.ARRAY, getTypeName(Types.INTEGER));
            stmt.execute();
            ARRAY errorsArray = stmt.getARRAY(errorIndex);
            int[] errors;
            if (errorsArray != null) {
                errors = errorsArray.getIntArray();
            } else {
                errors = new int[0];
            }
            if (errors.length > 0) {
                // set the statement count so the failed row number gets reported correctly
                statistics.get(batch).set(DataWriterStatisticConstants.STATEMENTCOUNT, errors[0]);
                throw new BulkSqlException(errors, lastEventType.toString(), sql);
            }
        }
    } catch (SQLException ex) {
        throw platform.getSqlTemplate().translate(ex);
    } finally {
        lastEventType = null;
        rowArrays.clear();
        statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
    }
}
Also used: ARRAY(oracle.sql.ARRAY), SQLException(java.sql.SQLException), Connection(java.sql.Connection), OracleCallableStatement(oracle.jdbc.internal.OracleCallableStatement), BulkSqlException(org.jumpmind.db.sql.BulkSqlException), Column(org.jumpmind.db.model.Column), ArrayDescriptor(oracle.sql.ArrayDescriptor), JdbcSqlTransaction(org.jumpmind.db.sql.JdbcSqlTransaction)
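The placeholder loop above uses i <= columns.length on purpose: the stored procedure takes one array per column plus a trailing error OUT array. A hedged sketch of that construction in isolation (the helper name is hypothetical):

// Hypothetical helper isolating the placeholder construction above:
// one "?" per column array plus one extra slot for the error OUT parameter.
static String buildCallSql(String procedureName, int columnCount) {
    StringBuilder questions = new StringBuilder();
    for (int i = 0; i <= columnCount; i++) {
        questions.append("?, ");
    }
    questions.setLength(questions.length() - 2); // trim the trailing ", "
    return String.format("{ call %s(%s) }", procedureName, questions);
}

For a three-column table this produces { call proc(?, ?, ?, ?) }, and the fourth slot is the one registered with registerOutParameter.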

Example 68 with Column

use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.

the class OracleBulkDatabaseWriter method getBulkLoadableColumns.

protected List<Column> getBulkLoadableColumns(Table table) {
    ArrayList<Column> columns = new ArrayList<Column>(Arrays.asList(table.getColumns()));
    Iterator<Column> iterator = columns.iterator();
    // TODO support BLOB and CLOBs in bulk load. For now, remove them
    while (iterator.hasNext()) {
        Column column = (Column) iterator.next();
        if (column.getMappedTypeCode() == Types.CLOB || column.getMappedTypeCode() == Types.BLOB || column.getMappedTypeCode() == Types.VARBINARY) {
            iterator.remove();
        }
    }
    return columns;
}
Also used: Column(org.jumpmind.db.model.Column), ArrayList(java.util.ArrayList)
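A hedged usage sketch of the filter above; the table name and the platform lookup around it are assumptions for illustration only:

// Illustrative usage only: list the columns that would survive the
// bulk-load filter for a cached table definition.
Table table = platform.getTableFromCache(null, null, "some_table", false);
for (Column column : getBulkLoadableColumns(table)) {
    System.out.println(column.getName() + " (type code " + column.getMappedTypeCode() + ")");
}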

Example 69 with Column

use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.

the class MySqlBulkDatabaseWriter method write.

public void write(CsvData data) {
    DataEventType dataEventType = data.getDataEventType();
    switch(dataEventType) {
        case INSERT:
            statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT);
            statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER);
            statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
            try {
                String[] parsedData = data.getParsedData(CsvData.ROW_DATA);
                byte[] byteData = null;
                if (needsBinaryConversion) {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    CsvWriter writer = new CsvWriter(new OutputStreamWriter(out), ',');
                    writer.setEscapeMode(CsvWriter.ESCAPE_MODE_BACKSLASH);
                    writer.setRecordDelimiter('\n');
                    writer.setTextQualifier('"');
                    writer.setUseTextQualifier(true);
                    writer.setForceQualifier(true);
                    writer.setNullString("\\N");
                    Column[] columns = targetTable.getColumns();
                    for (int i = 0; i < columns.length; i++) {
                        if (columns[i].isOfBinaryType() && parsedData[i] != null) {
                            if (i > 0) {
                                out.write(',');
                            }
                            out.write('"');
                            if (batch.getBinaryEncoding().equals(BinaryEncoding.HEX)) {
                                out.write(escape(Hex.decodeHex(parsedData[i].toCharArray())));
                            } else if (batch.getBinaryEncoding().equals(BinaryEncoding.BASE64)) {
                                out.write(escape(Base64.decodeBase64(parsedData[i].getBytes())));
                            }
                            out.write('"');
                        } else {
                            writer.write(parsedData[i], true);
                            writer.flush();
                        }
                    }
                    writer.endRecord();
                    writer.close();
                    byteData = out.toByteArray();
                } else {
                    String formattedData = CsvUtils.escapeCsvData(parsedData, '\n', '"', CsvWriter.ESCAPE_MODE_BACKSLASH, "\\N");
                    byteData = formattedData.getBytes();
                }
                this.stagedInputFile.getOutputStream().write(byteData);
                loadedRows++;
                loadedBytes += byteData.length;
            } catch (Exception ex) {
                throw getPlatform().getSqlTemplate().translate(ex);
            } finally {
                statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
            }
            break;
        case UPDATE:
        case DELETE:
        default:
            flush();
            super.write(data);
            break;
    }
    if (loadedRows >= maxRowsBeforeFlush || loadedBytes >= maxBytesBeforeFlush) {
        flush();
    }
}
Also used: CsvWriter(org.jumpmind.symmetric.csv.CsvWriter), Column(org.jumpmind.db.model.Column), DataEventType(org.jumpmind.symmetric.io.data.DataEventType), OutputStreamWriter(java.io.OutputStreamWriter), ByteArrayOutputStream(java.io.ByteArrayOutputStream), SQLException(java.sql.SQLException)
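The needsBinaryConversion flag consulted above is not shown being set in this snippet; a plausible reconstruction is a simple scan of the target table's columns (this is an assumption, not the writer's actual initialization):

// Assumed reconstruction: binary-aware CSV staging is only needed when at
// least one target column is of a binary type.
static boolean needsBinaryConversion(Column[] columns) {
    for (Column column : columns) {
        if (column.isOfBinaryType()) {
            return true;
        }
    }
    return false;
}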

Example 70 with Column

use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.

the class PostgresBulkDatabaseWriter method createCopyMgrSql.

private String createCopyMgrSql() {
    StringBuilder sql = new StringBuilder("COPY ");
    DatabaseInfo dbInfo = platform.getDatabaseInfo();
    String quote = dbInfo.getDelimiterToken();
    String catalogSeparator = dbInfo.getCatalogSeparator();
    String schemaSeparator = dbInfo.getSchemaSeparator();
    sql.append(targetTable.getQualifiedTableName(quote, catalogSeparator, schemaSeparator));
    sql.append("(");
    Column[] columns = targetTable.getColumns();
    for (Column column : columns) {
        String columnName = column.getName();
        if (StringUtils.isNotBlank(columnName)) {
            sql.append(quote);
            sql.append(columnName);
            sql.append(quote);
            sql.append(",");
        }
    }
    sql.replace(sql.length() - 1, sql.length(), ")");
    sql.append("FROM STDIN with delimiter ',' csv quote ''''");
    return sql.toString();
}
Also used: DatabaseInfo(org.jumpmind.db.platform.DatabaseInfo), Column(org.jumpmind.db.model.Column)
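The statement returned above is meant for PostgreSQL's COPY ... FROM STDIN protocol. A hedged sketch of how such a statement is typically fed through the pgjdbc CopyManager; the method, its parameters, and the unwrapped BaseConnection are assumptions, not the writer's actual code:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import org.postgresql.copy.CopyManager;
import org.postgresql.core.BaseConnection;

// Illustrative only: stream a staged CSV file through COPY ... FROM STDIN
// using a statement shaped like the one built by createCopyMgrSql().
static long copyStagedFile(BaseConnection pgConnection, String copySql, String stagedCsvPath)
        throws SQLException, IOException {
    CopyManager copyManager = new CopyManager(pgConnection);
    try (InputStream csv = new FileInputStream(stagedCsvPath)) {
        return copyManager.copyIn(copySql, csv);
    }
}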

Aggregations

Column (org.jumpmind.db.model.Column): 179
Table (org.jumpmind.db.model.Table): 78
ArrayList (java.util.ArrayList): 34
IndexColumn (org.jumpmind.db.model.IndexColumn): 23
PlatformColumn (org.jumpmind.db.model.PlatformColumn): 21
Test (org.junit.Test): 16
Row (org.jumpmind.db.sql.Row): 15
LinkedHashMap (java.util.LinkedHashMap): 12
ResultSet (java.sql.ResultSet): 11
DmlStatement (org.jumpmind.db.sql.DmlStatement): 10
SqlException (org.jumpmind.db.sql.SqlException): 10
InvocationTargetException (java.lang.reflect.InvocationTargetException): 9
IIndex (org.jumpmind.db.model.IIndex): 9
HashMap (java.util.HashMap): 8
ForeignKey (org.jumpmind.db.model.ForeignKey): 8
CsvData (org.jumpmind.symmetric.io.data.CsvData): 8
PreparedStatement (java.sql.PreparedStatement): 7
IOException (java.io.IOException): 6
SQLException (java.sql.SQLException): 6
Reference (org.jumpmind.db.model.Reference): 6