Use of org.jumpmind.util.Statistics in project symmetric-ds by JumpMind.
Class MultiBatchStagingWriter, method end.
@Override
public void end(Table table) {
    if (this.currentDataWriter != null) {
        this.currentDataWriter.end(table);
        // Roll the nested writer's per-batch counters up into the outgoing batch.
        Statistics stats = this.currentDataWriter.getStatistics().get(batch);
        this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
        this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
    }
}
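In both MultiBatchStagingWriter methods here, Statistics acts as a per-batch map of named counters: get(key) reads a counter and increment(key, count) adds to one. A minimal sketch of that contract, assuming nothing beyond the calls visible in these snippets (CounterStats and the "BYTECOUNT" key below are hypothetical stand-ins, not the org.jumpmind.util.Statistics implementation):

import java.util.HashMap;

// Hypothetical stand-in mirroring the get/increment usage shown above.
class CounterStats extends HashMap<String, Long> {
    public long get(String key) {
        Long value = super.get(key); // missing counters read as zero
        return value == null ? 0L : value;
    }

    public void increment(String key, long count) {
        put(key, get(key) + count);
    }

    public static void main(String[] args) {
        CounterStats stats = new CounterStats();
        stats.increment("BYTECOUNT", 1024);
        stats.increment("BYTECOUNT", 512);
        System.out.println(stats.get("BYTECOUNT")); // prints 1536
    }
}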
Use of org.jumpmind.util.Statistics in project symmetric-ds by JumpMind.
Class MultiBatchStagingWriter, method closeCurrentDataWriter.
private void closeCurrentDataWriter() {
    if (this.currentDataWriter != null) {
        // Capture the batch statistics before the writer is closed and released.
        Statistics stats = this.currentDataWriter.getStatistics().get(batch);
        this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
        this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
        this.currentDataWriter.close();
        this.currentDataWriter = null;
        checkSend();
    }
}
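closeCurrentDataWriter repeats the same statistics capture as end before closing the delegate and invoking checkSend. A hedged refactoring sketch that consolidates the duplicated block (updateOutgoingBatchStats is a hypothetical helper, not present in the source):

// Hypothetical helper; uses only calls that appear in the two methods above.
private void updateOutgoingBatchStats() {
    Statistics stats = this.currentDataWriter.getStatistics().get(batch);
    this.outgoingBatch.setByteCount(stats.get(DataWriterStatisticConstants.BYTECOUNT));
    this.outgoingBatch.setExtractMillis(System.currentTimeMillis() - batch.getStartTime().getTime());
}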
Use of org.jumpmind.util.Statistics in project symmetric-ds by JumpMind.
Class ProtocolDataReader, method readNext.
public Object readNext() {
    try {
        Set<String> keys = null;
        String schemaName = null;
        String catalogName = null;
        String[] parsedOldData = null;
        long bytesRead = 0;
        Table table = null;
        while (tokens != null || csvReader.readRecord()) {
            lineNumber++;
            context.put(CTX_LINE_NUMBER, lineNumber);
            if (tokens == null) {
                tokens = csvReader.getValues();
            }
            bytesRead += logDebugAndCountBytes(tokens);
            Statistics stats = null;
            if (batch != null) {
                stats = statistics.get(batch);
                stats.increment(DataReaderStatistics.READ_BYTE_COUNT, bytesRead);
                bytesRead = 0;
            }
            // Table metadata is complete once a non-metadata token arrives; return
            // the table and keep the current tokens for the next call.
            if (table != null && !(tokens[0].equals(CsvConstants.TABLE) || tokens[0].equals(CsvConstants.KEYS)
                    || tokens[0].equals(CsvConstants.COLUMNS))) {
                return table;
            }
            if (stats != null && (tokens[0].equals(CsvConstants.INSERT) || tokens[0].equals(CsvConstants.UPDATE)
                    || tokens[0].equals(CsvConstants.DELETE))) {
                stats.increment(DataReaderStatistics.READ_RECORD_COUNT, 1);
            }
            if (tokens[0].equals(CsvConstants.INSERT)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.INSERT);
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.OLD)) {
                // Remember the old row image for the update or delete that follows.
                parsedOldData = CollectionUtils.copyOfRange(tokens, 1, tokens.length);
            } else if (tokens[0].equals(CsvConstants.UPDATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.UPDATE);
                int columnCount = context.getLastParsedTable().getColumnCount();
                if (tokens.length <= columnCount) {
                    String msg = String.format("Invalid state while parsing csv data. "
                            + "The number of columns (%d) reported for table '%s' doesn't match up with the token count (%d) data: %s",
                            columnCount, context.getLastParsedTable().getFullyQualifiedTableName(),
                            tokens.length, ArrayUtils.toString(tokens));
                    throw new IllegalStateException(msg);
                }
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, columnCount + 1));
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, columnCount + 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.DELETE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.DELETE);
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BATCH) || tokens[0].equals(CsvConstants.RETRY)) {
                Batch batch = new Batch(batchType, Long.parseLong(tokens[1]), channelId, binaryEncoding,
                        sourceNodeId, targetNodeId, false);
                statistics.put(batch, new DataReaderStatistics());
                tokens = null;
                return batch;
            } else if (tokens[0].equals(CsvConstants.NO_BINARY_OLD_DATA)) {
                if (tokens.length > 1) {
                    noBinaryOldData = Boolean.parseBoolean(tokens[1]);
                }
            } else if (tokens[0].equals(CsvConstants.NODEID)) {
                this.sourceNodeId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.BINARY)) {
                this.binaryEncoding = BinaryEncoding.valueOf(tokens[1]);
            } else if (tokens[0].equals(CsvConstants.CHANNEL)) {
                this.channelId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.SCHEMA)) {
                schemaName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.CATALOG)) {
                catalogName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.TABLE)) {
                String tableName = tokens[1];
                // Reuse previously parsed table metadata when available.
                table = context.getParsedTables().get(Table.getFullyQualifiedTableName(catalogName, schemaName, tableName));
                if (table != null) {
                    context.setLastParsedTable(table);
                } else {
                    table = new Table(catalogName, schemaName, tableName);
                    context.setLastParsedTable(table);
                }
            } else if (tokens[0].equals(CsvConstants.KEYS)) {
                if (keys == null) {
                    keys = new HashSet<String>(tokens.length);
                }
                for (int i = 1; i < tokens.length; i++) {
                    keys.add(tokens[i]);
                }
            } else if (tokens[0].equals(CsvConstants.COLUMNS)) {
                table.removeAllColumns();
                for (int i = 1; i < tokens.length; i++) {
                    Column column = new Column(tokens[i], keys != null && keys.contains(tokens[i]));
                    table.addColumn(column);
                }
                context.getParsedTables().put(table.getFullyQualifiedTableName(), table);
            } else if (tokens[0].equals(CsvConstants.COMMIT)) {
                if (batch != null) {
                    batch.setComplete(true);
                }
                tokens = null;
                return null;
            } else if (tokens[0].equals(CsvConstants.SQL)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.SQL);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BSH)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.BSH);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.CREATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.CREATE);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.IGNORE)) {
                if (batch != null) {
                    batch.setIgnored(true);
                }
            } else {
                log.info("Unable to handle unknown csv values: " + Arrays.toString(tokens));
            }
            tokens = null;
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    }
    return null;
}
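readNext is a pull parser: each call returns the next protocol object, which is a Batch (batch or retry header), a Table (once its metadata lines are complete), a CsvData (insert, update, delete, sql, bsh, or create), or null at a commit marker or end of input. A sketch of a dispatch loop a caller might run, assuming only the return contract visible above; the handle* callbacks are illustrative, not SymmetricDS API:

// Illustrative consumer loop for ProtocolDataReader.readNext().
Object next;
while ((next = reader.readNext()) != null) {
    if (next instanceof Batch) {
        handleBatch((Batch) next);   // new batch header or retry marker
    } else if (next instanceof Table) {
        handleTable((Table) next);   // table metadata is complete
    } else if (next instanceof CsvData) {
        handleRow((CsvData) next);   // a data event within the current batch
    }
}
// Note: null is returned at a commit as well as at end of stream, so a real
// caller would keep reading across commits until the input is exhausted.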
Use of org.jumpmind.util.Statistics in project symmetric-ds by JumpMind.
Class AbstractDatabaseWriterConflictResolver, method needsResolved.
public void needsResolved(AbstractDatabaseWriter writer, CsvData data, LoadStatus loadStatus) {
    DataEventType originalEventType = data.getDataEventType();
    DatabaseWriterSettings writerSettings = writer.getWriterSettings();
    Conflict conflict = writerSettings.pickConflict(writer.getTargetTable(), writer.getBatch());
    Statistics statistics = writer.getStatistics().get(writer.getBatch());
    long statementCount = statistics.get(DataWriterStatisticConstants.STATEMENTCOUNT);
    long lineNumber = statistics.get(DataWriterStatisticConstants.LINENUMBER);
    ResolvedData resolvedData = writerSettings.getResolvedData(statementCount);
    logConflictHappened(conflict, data, writer, resolvedData, lineNumber);
    switch (originalEventType) {
        case INSERT:
            if (resolvedData != null) {
                attemptToResolve(resolvedData, data, writer, conflict);
            } else {
                switch (conflict.getResolveType()) {
                    case FALLBACK:
                        performFallbackToUpdate(writer, data, conflict, true);
                        break;
                    case NEWER_WINS:
                        if ((conflict.getDetectType() == DetectConflict.USE_TIMESTAMP && isTimestampNewer(conflict, writer, data))
                                || (conflict.getDetectType() == DetectConflict.USE_VERSION && isVersionNewer(conflict, writer, data))) {
                            performFallbackToUpdate(writer, data, conflict, true);
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                        break;
                    case IGNORE:
                        ignore(writer, conflict);
                        break;
                    case MANUAL:
                    default:
                        attemptToResolve(resolvedData, data, writer, conflict);
                        break;
                }
            }
            break;
        case UPDATE:
            if (resolvedData != null) {
                attemptToResolve(resolvedData, data, writer, conflict);
            } else {
                switch (conflict.getResolveType()) {
                    case FALLBACK:
                        if (conflict.getDetectType() == DetectConflict.USE_PK_DATA) {
                            CsvData withoutOldData = data.copyWithoutOldData();
                            try {
                                // we already tried to update using the pk
                                performFallbackToInsert(writer, withoutOldData, conflict, true);
                            } catch (ConflictException ex) {
                                performFallbackToUpdate(writer, withoutOldData, conflict, true);
                            }
                        } else {
                            try {
                                performFallbackToUpdate(writer, data, conflict, true);
                            } catch (ConflictException ex) {
                                performFallbackToInsert(writer, data, conflict, true);
                            }
                        }
                        break;
                    case NEWER_WINS:
                        if ((conflict.getDetectType() == DetectConflict.USE_TIMESTAMP && isTimestampNewer(conflict, writer, data))
                                || (conflict.getDetectType() == DetectConflict.USE_VERSION && isVersionNewer(conflict, writer, data))) {
                            try {
                                performFallbackToUpdate(writer, data, conflict, false);
                            } catch (ConflictException ex) {
                                performFallbackToInsert(writer, data, conflict, true);
                            }
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                        break;
                    case IGNORE:
                        ignore(writer, conflict);
                        break;
                    case MANUAL:
                    default:
                        attemptToResolve(resolvedData, data, writer, conflict);
                        break;
                }
            }
            break;
        case DELETE:
            switch (conflict.getResolveType()) {
                case FALLBACK:
                    LoadStatus status = LoadStatus.CONFLICT;
                    if (conflict.getDetectType() != DetectConflict.USE_PK_DATA) {
                        status = writer.delete(data, false);
                    }
                    if (status == LoadStatus.CONFLICT) {
                        writer.getStatistics().get(writer.getBatch()).increment(DataWriterStatisticConstants.MISSINGDELETECOUNT);
                    }
                    break;
                case IGNORE:
                    ignore(writer, conflict);
                    break;
                case NEWER_WINS:
                    // nothing to do ...
                    break;
                case MANUAL:
                default:
                    if (resolvedData != null) {
                        if (!resolvedData.isIgnoreRow()) {
                            writer.delete(data, false);
                        } else {
                            if (!conflict.isResolveRowOnly()) {
                                throw new IgnoreBatchException();
                            }
                        }
                    } else {
                        throw new ConflictException(data, writer.getTargetTable(), false, conflict,
                                (Exception) writer.getContext().get(AbstractDatabaseWriter.CONFLICT_ERROR));
                    }
                    break;
            }
            break;
        default:
            break;
    }
    logConflictResolution(conflict, data, writer, resolvedData, lineNumber);
}
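The resolvedData lookup at the top of needsResolved keys off the writer's STATEMENTCOUNT statistic. The producing side of that contract, shown end to end in the test below, reduces to roughly this sketch (chosenRowData is a placeholder for the csv-encoded row the operator wants applied):

// Sketch: registering a manual resolution for the conflicting statement,
// using only calls that appear elsewhere in these snippets.
Statistics stats = writer.getStatistics().get(writer.getBatch());
long statementNumber = stats.get(DataWriterStatisticConstants.STATEMENTCOUNT);
ResolvedData resolved = new ResolvedData(statementNumber, chosenRowData, false); // false: apply the row, don't ignore it
writerSettings.setResolvedData(resolved);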
Use of org.jumpmind.util.Statistics in project symmetric-ds by JumpMind.
Class DatabaseWriterTest, method testUpdateDetectOldDataManual.
@Test
public void testUpdateDetectOldDataManual() {
    Conflict setting = new Conflict();
    setting.setConflictId("unit.test");
    setting.setDetectType(DetectConflict.USE_OLD_DATA);
    setting.setResolveRowOnly(false);
    setting.setResolveChangesOnly(false);
    setting.setResolveType(ResolveConflict.MANUAL);
    writerSettings.setDefaultConflictSetting(setting);
    String origId = getNextId();
    String[] originalValues = massageExpectectedResultsForDialect(new String[] { origId, "string2", "changed value",
            "char2", "char not null2", "2007-01-02 03:20:10.000", "2012-03-12 07:00:00.000", "0", "2", "67.89", "-0.0747663" });
    CsvData data = new CsvData(DataEventType.INSERT, originalValues);
    writeData(data, originalValues);
    // Build an old-data image that no longer matches the row in the database,
    // so the update is detected as a conflict.
    String[] oldData = CollectionUtils.copyOfRange(originalValues, 0, originalValues.length);
    oldData[2] = "original value";
    oldData = massageExpectectedResultsForDialect(oldData);
    String[] newData = CollectionUtils.copyOfRange(originalValues, 0, originalValues.length);
    newData[2] = "new value";
    newData = massageExpectectedResultsForDialect(newData);
    CsvData update = new CsvData(DataEventType.UPDATE);
    update.putParsedData(CsvData.ROW_DATA, newData);
    update.putParsedData(CsvData.OLD_DATA, oldData);
    try {
        writeData(update);
        Assert.fail("Should have received a conflict exception");
    } catch (ConflictException ex) {
        // Register a manual resolution keyed by the failing statement number, then retry.
        Statistics stats = lastDataWriterUsed.getStatistics().values().iterator().next();
        long statementNumber = stats.get(DataWriterStatisticConstants.STATEMENTCOUNT);
        ResolvedData resolvedData = new ResolvedData(statementNumber, update.getCsvData(CsvData.ROW_DATA), false);
        writerSettings.setResolvedData(resolvedData);
        writeData(update);
        Map<String, Object> row = queryForRow(origId);
        Assert.assertNotNull(row);
        Assert.assertEquals(newData[2], row.get("string_required_value"));
    }
}
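Taken together with needsResolved above, the test shows the full MANUAL round trip: the first writeData(update) raises a ConflictException because the old-data image no longer matches the stored row, the operator-chosen row is registered as ResolvedData under the failing statement number, and the retried write applies it. Keying resolutions by statement count ties each resolution to the exact statement that conflicted, so a replayed batch picks up each resolution at the point it is needed.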