Example 1 with Statistics

Use of org.jumpmind.util.Statistics in the project symmetric-ds by JumpMind.

From the class MultiBatchStagingWriter, method end:

@Override
public void end(Table table) {
    if (this.currentDataWriter != null) {
        this.currentDataWriter.end(table);
        // pull the per-batch statistics accumulated by the delegate writer
        Statistics stats = this.currentDataWriter.getStatistics().get(batch);
        this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
        // extract time is measured from the batch's recorded start time
        this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
    }
}
Also used: Statistics (org.jumpmind.util.Statistics)

Example 2 with Statistics

Use of org.jumpmind.util.Statistics in the project symmetric-ds by JumpMind.

From the class MultiBatchStagingWriter, method closeCurrentDataWriter:

private void closeCurrentDataWriter() {
    if (this.currentDataWriter != null) {
        Statistics stats = this.currentDataWriter.getStatistics().get(batch);
        this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
        this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
        this.currentDataWriter.close();
        this.currentDataWriter = null;
        checkSend();
    }
}
Also used: Statistics (org.jumpmind.util.Statistics)
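
The statistics bookkeeping in Examples 1 and 2 is identical: fetch the per-batch Statistics from the delegate writer, copy its byte count onto the outgoing batch, and compute the elapsed extract time from the batch's start time. A minimal sketch of how the duplicated logic could be consolidated into one helper; the method name updateOutgoingBatchStatistics is hypothetical, not part of SymmetricDS:

// Hypothetical helper that both end(Table) and closeCurrentDataWriter()
// could call in place of their duplicated bookkeeping.
private void updateOutgoingBatchStatistics() {
    // getStatistics() maps each Batch to the Statistics accumulated for it
    Statistics stats = this.currentDataWriter.getStatistics().get(batch);
    this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
    // extract time runs from the batch's recorded start time to now
    this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
}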

Example 3 with Statistics

Use of org.jumpmind.util.Statistics in the project symmetric-ds by JumpMind.

From the class ProtocolDataReader, method readNext:

public Object readNext() {
    try {
        Set<String> keys = null;
        String schemaName = null;
        String catalogName = null;
        String[] parsedOldData = null;
        long bytesRead = 0;
        Table table = null;
        // 'tokens' may hold a record carried over from a previous call (e.g. after a table was returned);
        // otherwise read the next record from the stream
        while (tokens != null || csvReader.readRecord()) {
            lineNumber++;
            context.put(CTX_LINE_NUMBER, lineNumber);
            if (tokens == null) {
                tokens = csvReader.getValues();
            }
            bytesRead += logDebugAndCountBytes(tokens);
            Statistics stats = null;
            if (batch != null) {
                stats = statistics.get(batch);
                stats.increment(DataReaderStatistics.READ_BYTE_COUNT, bytesRead);
                bytesRead = 0;
            }
            // once a table's metadata is fully read, return it as soon as a non-metadata record appears
            if (table != null && !(tokens[0].equals(CsvConstants.TABLE) || tokens[0].equals(CsvConstants.KEYS) || tokens[0].equals(CsvConstants.COLUMNS))) {
                return table;
            }
            if (stats != null && (tokens[0].equals(CsvConstants.INSERT) || tokens[0].equals(CsvConstants.UPDATE) || tokens[0].equals(CsvConstants.DELETE))) {
                stats.increment(DataReaderStatistics.READ_RECORD_COUNT, 1);
            }
            if (tokens[0].equals(CsvConstants.INSERT)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.INSERT);
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.OLD)) {
                parsedOldData = CollectionUtils.copyOfRange(tokens, 1, tokens.length);
            } else if (tokens[0].equals(CsvConstants.UPDATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.UPDATE);
                int columnCount = context.getLastParsedTable().getColumnCount();
                if (tokens.length <= columnCount) {
                    String msg = String.format("Invalid state while parsing csv data.  " + "The number of columns (%d) reported for table '%s' doesn't match up with the token count (%d) data: %s", columnCount, context.getLastParsedTable().getFullyQualifiedTableName(), tokens.length, ArrayUtils.toString(tokens));
                    throw new IllegalStateException(msg);
                }
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, columnCount + 1));
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, columnCount + 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.DELETE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.DELETE);
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BATCH) || tokens[0].equals(CsvConstants.RETRY)) {
                // a batch (or retry) header starts a new Batch and registers a fresh statistics holder for it
                Batch batch = new Batch(batchType, Long.parseLong(tokens[1]), channelId, binaryEncoding, sourceNodeId, targetNodeId, false);
                statistics.put(batch, new DataReaderStatistics());
                tokens = null;
                return batch;
            } else if (tokens[0].equals(CsvConstants.NO_BINARY_OLD_DATA)) {
                if (tokens.length > 1) {
                    noBinaryOldData = Boolean.parseBoolean(tokens[1]);
                }
            } else if (tokens[0].equals(CsvConstants.NODEID)) {
                this.sourceNodeId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.BINARY)) {
                this.binaryEncoding = BinaryEncoding.valueOf(tokens[1]);
            } else if (tokens[0].equals(CsvConstants.CHANNEL)) {
                this.channelId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.SCHEMA)) {
                schemaName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.CATALOG)) {
                catalogName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.TABLE)) {
                String tableName = tokens[1];
                table = context.getParsedTables().get(Table.getFullyQualifiedTableName(catalogName, schemaName, tableName));
                if (table != null) {
                    context.setLastParsedTable(table);
                } else {
                    table = new Table(catalogName, schemaName, tableName);
                    context.setLastParsedTable(table);
                }
            } else if (tokens[0].equals(CsvConstants.KEYS)) {
                if (keys == null) {
                    keys = new HashSet<String>(tokens.length);
                }
                for (int i = 1; i < tokens.length; i++) {
                    keys.add(tokens[i]);
                }
            } else if (tokens[0].equals(CsvConstants.COLUMNS)) {
                table.removeAllColumns();
                for (int i = 1; i < tokens.length; i++) {
                    Column column = new Column(tokens[i], keys != null && keys.contains(tokens[i]));
                    table.addColumn(column);
                }
                context.getParsedTables().put(table.getFullyQualifiedTableName(), table);
            } else if (tokens[0].equals(CsvConstants.COMMIT)) {
                if (batch != null) {
                    batch.setComplete(true);
                }
                tokens = null;
                return null;
            } else if (tokens[0].equals(CsvConstants.SQL)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.SQL);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BSH)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.BSH);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.CREATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.CREATE);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.IGNORE)) {
                if (batch != null) {
                    batch.setIgnored(true);
                }
            } else {
                log.info("Unable to handle unknown csv values: " + Arrays.toString(tokens));
            }
            tokens = null;
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    }
    return null;
}
Also used: Table (org.jumpmind.db.model.Table), IOException (java.io.IOException), Statistics (org.jumpmind.util.Statistics), CsvData (org.jumpmind.symmetric.io.data.CsvData), Batch (org.jumpmind.symmetric.io.data.Batch), Column (org.jumpmind.db.model.Column), IoException (org.jumpmind.exception.IoException)
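
readNext() is polymorphic by design: depending on the protocol keyword in tokens[0] it returns a Batch, a Table, a CsvData event, or null (null marks both a commit and the end of input). A rough sketch of how a caller might dispatch on the return type; in SymmetricDS this dispatching is normally driven by the DataProcessor and IDataReader machinery, so the loop below is illustrative only and assumes an initialized ProtocolDataReader named reader:

// Illustrative only: a real consumer must distinguish the null returned at a
// commit marker from true end of stream, which DataProcessor handles.
Object next;
while ((next = reader.readNext()) != null) {
    if (next instanceof Batch) {
        Batch batch = (Batch) next;      // a batch (or retry) header was read
    } else if (next instanceof Table) {
        Table table = (Table) next;      // catalog/schema/keys/columns metadata
    } else if (next instanceof CsvData) {
        CsvData data = (CsvData) next;   // an insert/update/delete/sql/bsh/create event
    }
}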

Example 4 with Statistics

Use of org.jumpmind.util.Statistics in the project symmetric-ds by JumpMind.

From the class AbstractDatabaseWriterConflictResolver, method needsResolved:

public void needsResolved(AbstractDatabaseWriter writer, CsvData data, LoadStatus loadStatus) {
    DataEventType originalEventType = data.getDataEventType();
    DatabaseWriterSettings writerSettings = writer.getWriterSettings();
    Conflict conflict = writerSettings.pickConflict(writer.getTargetTable(), writer.getBatch());
    Statistics statistics = writer.getStatistics().get(writer.getBatch());
    // the statement count locates the conflicting row within the batch; the line number feeds the logging below
    long statementCount = statistics.get(DataWriterStatisticConstants.STATEMENTCOUNT);
    long lineNumber = statistics.get(DataWriterStatisticConstants.LINENUMBER);
    // a ResolvedData registered for this statement means a resolution was already supplied (see Example 5)
    ResolvedData resolvedData = writerSettings.getResolvedData(statementCount);
    logConflictHappened(conflict, data, writer, resolvedData, lineNumber);
    switch(originalEventType) {
        case INSERT:
            if (resolvedData != null) {
                attemptToResolve(resolvedData, data, writer, conflict);
            } else {
                switch(conflict.getResolveType()) {
                    case FALLBACK:
                        performFallbackToUpdate(writer, data, conflict, true);
                        break;
                    case NEWER_WINS:
                        if ((conflict.getDetectType() == DetectConflict.USE_TIMESTAMP && isTimestampNewer(conflict, writer, data)) || (conflict.getDetectType() == DetectConflict.USE_VERSION && isVersionNewer(conflict, writer, data))) {
                            performFallbackToUpdate(writer, data, conflict, true);
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                        break;
                    case IGNORE:
                        ignore(writer, conflict);
                        break;
                    case MANUAL:
                    default:
                        attemptToResolve(resolvedData, data, writer, conflict);
                        break;
                }
            }
            break;
        case UPDATE:
            if (resolvedData != null) {
                attemptToResolve(resolvedData, data, writer, conflict);
            } else {
                switch(conflict.getResolveType()) {
                    case FALLBACK:
                        if (conflict.getDetectType() == DetectConflict.USE_PK_DATA) {
                            CsvData withoutOldData = data.copyWithoutOldData();
                            try {
                                // we already tried to update using the pk
                                performFallbackToInsert(writer, withoutOldData, conflict, true);
                            } catch (ConflictException ex) {
                                performFallbackToUpdate(writer, withoutOldData, conflict, true);
                            }
                        } else {
                            try {
                                performFallbackToUpdate(writer, data, conflict, true);
                            } catch (ConflictException ex) {
                                performFallbackToInsert(writer, data, conflict, true);
                            }
                        }
                        break;
                    case NEWER_WINS:
                        if ((conflict.getDetectType() == DetectConflict.USE_TIMESTAMP && isTimestampNewer(conflict, writer, data)) || (conflict.getDetectType() == DetectConflict.USE_VERSION && isVersionNewer(conflict, writer, data))) {
                            try {
                                performFallbackToUpdate(writer, data, conflict, false);
                            } catch (ConflictException ex) {
                                performFallbackToInsert(writer, data, conflict, true);
                            }
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                        break;
                    case IGNORE:
                        ignore(writer, conflict);
                        break;
                    case MANUAL:
                    default:
                        attemptToResolve(resolvedData, data, writer, conflict);
                        break;
                }
            }
            break;
        case DELETE:
            switch(conflict.getResolveType()) {
                case FALLBACK:
                    LoadStatus status = LoadStatus.CONFLICT;
                    if (conflict.getDetectType() != DetectConflict.USE_PK_DATA) {
                        status = writer.delete(data, false);
                    }
                    if (status == LoadStatus.CONFLICT) {
                        writer.getStatistics().get(writer.getBatch()).increment(DataWriterStatisticConstants.MISSINGDELETECOUNT);
                    }
                    break;
                case IGNORE:
                    ignore(writer, conflict);
                    break;
                case NEWER_WINS:
                    // nothing to do ...
                    break;
                case MANUAL:
                default:
                    if (resolvedData != null) {
                        if (!resolvedData.isIgnoreRow()) {
                            writer.delete(data, false);
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                    } else {
                        throw new ConflictException(data, writer.getTargetTable(), false, conflict, (Exception) writer.getContext().get(AbstractDatabaseWriter.CONFLICT_ERROR));
                    }
                    break;
            }
            break;
        default:
            break;
    }
    logConflictResolution(conflict, data, writer, resolvedData, lineNumber);
}
Also used: DetectConflict (org.jumpmind.symmetric.io.data.writer.Conflict.DetectConflict), LoadStatus (org.jumpmind.symmetric.io.data.writer.AbstractDatabaseWriter.LoadStatus), DataEventType (org.jumpmind.symmetric.io.data.DataEventType), Statistics (org.jumpmind.util.Statistics), CsvData (org.jumpmind.symmetric.io.data.CsvData)
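
The two counters read from Statistics at the top of needsResolved() are what connect a conflict to a manual resolution: STATEMENTCOUNT keys the getResolvedData() lookup, while LINENUMBER only feeds the conflict logging. A short sketch of registering a resolution for the conflicting statement, mirroring what the test in Example 5 does; the writer, data, and writerSettings variables are assumed to be in scope:

// Every call below appears verbatim in Example 4 or Example 5.
Statistics statistics = writer.getStatistics().get(writer.getBatch());
long statementCount = statistics.get(DataWriterStatisticConstants.STATEMENTCOUNT);
String rowCsv = data.getCsvData(CsvData.ROW_DATA);                       // row values to apply
ResolvedData resolved = new ResolvedData(statementCount, rowCsv, false); // false: apply the row, don't ignore it
writerSettings.setResolvedData(resolved);
// On replay, needsResolved() finds this entry via getResolvedData(statementCount)
// and routes it through attemptToResolve() instead of failing with a ConflictException.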

Example 5 with Statistics

Use of org.jumpmind.util.Statistics in the project symmetric-ds by JumpMind.

From the class DatabaseWriterTest, method testUpdateDetectOldDataManual:

@Test
public void testUpdateDetectOldDataManual() {
    Conflict setting = new Conflict();
    setting.setConflictId("unit.test");
    setting.setDetectType(DetectConflict.USE_OLD_DATA);
    setting.setResolveRowOnly(false);
    setting.setResolveChangesOnly(false);
    setting.setResolveType(ResolveConflict.MANUAL);
    writerSettings.setDefaultConflictSetting(setting);
    String origId = getNextId();
    String[] originalValues = massageExpectectedResultsForDialect(new String[] { origId, "string2", "changed value", "char2", "char not null2", "2007-01-02 03:20:10.000", "2012-03-12 07:00:00.000", "0", "2", "67.89", "-0.0747663" });
    CsvData data = new CsvData(DataEventType.INSERT, originalValues);
    writeData(data, originalValues);
    String[] oldData = CollectionUtils.copyOfRange(originalValues, 0, originalValues.length);
    oldData[2] = "original value";
    oldData = massageExpectectedResultsForDialect(oldData);
    String[] newData = CollectionUtils.copyOfRange(originalValues, 0, originalValues.length);
    newData[2] = "new value";
    newData = massageExpectectedResultsForDialect(newData);
    CsvData update = new CsvData(DataEventType.UPDATE);
    update.putParsedData(CsvData.ROW_DATA, newData);
    update.putParsedData(CsvData.OLD_DATA, oldData);
    try {
        writeData(update);
        Assert.fail("Should have received a conflict exception");
    } catch (ConflictException ex) {
        Statistics stats = lastDataWriterUsed.getStatistics().values().iterator().next();
        long statementNumber = stats.get(DataWriterStatisticConstants.STATEMENTCOUNT);
        ResolvedData resolvedData = new ResolvedData(statementNumber, update.getCsvData(CsvData.ROW_DATA), false);
        writerSettings.setResolvedData(resolvedData);
        writeData(update);
        Map<String, Object> row = queryForRow(origId);
        Assert.assertNotNull(row);
        Assert.assertEquals(newData[2], row.get("string_required_value"));
    }
}
Also used: ResolveConflict (org.jumpmind.symmetric.io.data.writer.Conflict.ResolveConflict), DetectConflict (org.jumpmind.symmetric.io.data.writer.Conflict.DetectConflict), Statistics (org.jumpmind.util.Statistics), Map (java.util.Map), CsvData (org.jumpmind.symmetric.io.data.CsvData), AbstractWriterTest (org.jumpmind.symmetric.io.AbstractWriterTest), Test (org.junit.Test)
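
Taken together, the examples show Statistics acting as a container of named long counters: increment(key) adds one (Example 4's MISSINGDELETECOUNT), increment(key, amount) adds a delta (Example 3's READ_BYTE_COUNT), and get(key) reads the current value. A standalone sketch that sticks to constructors and methods appearing verbatim above; it assumes DataReaderStatistics is assignable to Statistics, as Example 3's Map<Batch, Statistics> usage implies:

// DataReaderStatistics's no-arg constructor is shown in Example 3, where
// instances are stored in and retrieved from a Map<Batch, Statistics>.
Statistics stats = new DataReaderStatistics();
stats.increment(DataReaderStatistics.READ_RECORD_COUNT);          // counter += 1
stats.increment(DataReaderStatistics.READ_BYTE_COUNT, 1024);      // counter += 1024
long bytesRead = stats.get(DataReaderStatistics.READ_BYTE_COUNT); // reads back 1024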

Aggregations

Statistics (org.jumpmind.util.Statistics): 16 usages
CsvData (org.jumpmind.symmetric.io.data.CsvData): 6 usages
IoException (org.jumpmind.exception.IoException): 3 usages
DataContext (org.jumpmind.symmetric.io.data.DataContext): 3 usages
DetectConflict (org.jumpmind.symmetric.io.data.writer.Conflict.DetectConflict): 3 usages
IOException (java.io.IOException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
Map (java.util.Map): 2 usages
Table (org.jumpmind.db.model.Table): 2 usages
AbstractWriterTest (org.jumpmind.symmetric.io.AbstractWriterTest): 2 usages
Batch (org.jumpmind.symmetric.io.data.Batch): 2 usages
DataEventType (org.jumpmind.symmetric.io.data.DataEventType): 2 usages
DataProcessor (org.jumpmind.symmetric.io.data.DataProcessor): 2 usages
IDataReader (org.jumpmind.symmetric.io.data.IDataReader): 2 usages
DataReaderStatistics (org.jumpmind.symmetric.io.data.reader.DataReaderStatistics): 2 usages
ResolveConflict (org.jumpmind.symmetric.io.data.writer.Conflict.ResolveConflict): 2 usages
IgnoreBatchException (org.jumpmind.symmetric.io.data.writer.IgnoreBatchException): 2 usages
IStagedResource (org.jumpmind.symmetric.io.stage.IStagedResource): 2 usages
Node (org.jumpmind.symmetric.model.Node): 2 usages
Test (org.junit.Test): 2 usages