Use of org.jumpmind.symmetric.io.data.DataEventType in project symmetric-ds by JumpMind.
Class TemplatedPublisherDataLoaderFilter, method fillOutTemplate.
protected String fillOutTemplate(Table table, CsvData data, String template, DataContext context) {
    DataEventType eventType = data.getDataEventType();
    String[] colNames = null;
    String[] colValues = null;
    if (eventType == DataEventType.DELETE) {
        colNames = table.getPrimaryKeyColumnNames();
        colValues = data.getParsedData(CsvData.PK_DATA);
    } else {
        colNames = table.getColumnNames();
        colValues = data.getParsedData(CsvData.ROW_DATA);
    }
    for (int i = 0; i < colValues.length; i++) {
        String col = colNames[i];
        template = replace(template, col, format(col, colValues[i]));
    }
    template = template.replace("DMLTYPE", eventType.name());
    template = template.replace("TIMESTAMP", Long.toString(System.currentTimeMillis()));
    return template;
}
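Below is a minimal, self-contained sketch of the same idea: on a DELETE only the primary-key columns and values are substituted into the template, otherwise all row columns are, and then DMLTYPE and TIMESTAMP tokens are filled in. The enum, the %COLUMN% token convention, and the column/value arrays here are stand-ins for this sketch, not the SymmetricDS types.

import java.util.Locale;

public class TemplateFillSketch {
    enum DataEventType { INSERT, UPDATE, DELETE }

    static String fillOutTemplate(DataEventType eventType, String[] pkNames, String[] pkValues,
            String[] colNames, String[] colValues, String template) {
        // Deletes usually carry only key data, so fall back to the PK columns.
        String[] names = eventType == DataEventType.DELETE ? pkNames : colNames;
        String[] values = eventType == DataEventType.DELETE ? pkValues : colValues;
        for (int i = 0; i < values.length; i++) {
            // %COLUMN% is an assumed token convention for this sketch.
            template = template.replace("%" + names[i].toUpperCase(Locale.ROOT) + "%",
                    values[i] == null ? "" : values[i]);
        }
        template = template.replace("DMLTYPE", eventType.name());
        template = template.replace("TIMESTAMP", Long.toString(System.currentTimeMillis()));
        return template;
    }

    public static void main(String[] args) {
        String out = fillOutTemplate(DataEventType.DELETE,
                new String[] { "id" }, new String[] { "42" },
                new String[] { "id", "name" }, new String[] { "42", "x" },
                "<row op=\"DMLTYPE\" id=\"%ID%\" ts=\"TIMESTAMP\"/>");
        System.out.println(out);
    }
}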
Use of org.jumpmind.symmetric.io.data.DataEventType in project symmetric-ds by JumpMind.
Class TransformWriter, method perform.
protected boolean perform(DataContext context, TransformedData data, TransformTable transformation,
        Map<String, String> sourceValues, Map<String, String> oldSourceValues) throws IgnoreRowException {
    boolean persistData = false;
    try {
        DataEventType eventType = data.getSourceDmlType();
        for (TransformColumn transformColumn : transformation.getTransformColumns()) {
            if (!transformColumn.isPk()) {
                IncludeOnType includeOn = transformColumn.getIncludeOn();
                if (includeOn == IncludeOnType.ALL
                        || (includeOn == IncludeOnType.INSERT && eventType == DataEventType.INSERT)
                        || (includeOn == IncludeOnType.UPDATE && eventType == DataEventType.UPDATE)
                        || (includeOn == IncludeOnType.DELETE && eventType == DataEventType.DELETE)) {
                    if (StringUtils.isBlank(transformColumn.getSourceColumnName())
                            || sourceValues.containsKey(transformColumn.getSourceColumnName())) {
                        try {
                            Object value = transformColumn(context, data, transformColumn, sourceValues, oldSourceValues);
                            if (value instanceof NewAndOldValue) {
                                data.put(transformColumn, ((NewAndOldValue) value).getNewValue(),
                                        oldSourceValues != null ? ((NewAndOldValue) value).getOldValue() : null, false);
                            } else if (value == null || value instanceof String) {
                                data.put(transformColumn, (String) value, null, false);
                            } else if (value instanceof List) {
                                throw new IllegalStateException(String.format(
                                        "Column transform failed %s.%s. Transforms that multiply rows must be marked as part of the primary key",
                                        transformColumn.getTransformId(), transformColumn.getTargetColumnName()));
                            } else {
                                throw new IllegalStateException(String.format(
                                        "Column transform failed %s.%s. It returned an unexpected type of %s",
                                        transformColumn.getTransformId(), transformColumn.getTargetColumnName(),
                                        value.getClass().getSimpleName()));
                            }
                        } catch (IgnoreColumnException e) {
                            // Do nothing. We are ignoring the column
                            if (log.isDebugEnabled()) {
                                log.debug("A transform indicated we should ignore the target column {}",
                                        transformColumn.getTargetColumnName());
                            }
                        }
                    } else {
                        if (eventType != DataEventType.DELETE) {
                            log.warn("Could not find a source column of {} for the transformation: {}",
                                    transformColumn.getSourceColumnName(), transformation.getTransformId());
                        } else {
                            log.debug("Could not find a source column of {} for the transformation: {}. This is probably because this was a DELETE event and no old data was captured.",
                                    transformColumn.getSourceColumnName(), transformation.getTransformId());
                        }
                    }
                }
            }
        }
        // transformation
        if (data.getColumnNames().length > 0) {
            TargetDmlAction targetAction = null;
            switch (data.getTargetDmlType()) {
                case INSERT:
                    targetAction = TargetDmlAction.INS_ROW;
                    break;
                case UPDATE:
                    targetAction = transformation.evaluateTargetDmlAction(context, data);
                    break;
                case DELETE:
                    targetAction = transformation.getDeleteAction();
                    break;
                default:
                    persistData = true;
            }
            if (targetAction != null) {
                // how to handle the update/delete action on target..
                switch (targetAction) {
                    case DEL_ROW:
                        data.setTargetDmlType(DataEventType.DELETE);
                        persistData = true;
                        break;
                    case UPDATE_COL:
                    case UPD_ROW:
                        data.setTargetDmlType(DataEventType.UPDATE);
                        persistData = true;
                        break;
                    case INS_ROW:
                        data.setTargetDmlType(DataEventType.INSERT);
                        persistData = true;
                        break;
                    case NONE:
                    default:
                        if (log.isDebugEnabled()) {
                            log.debug("The {} transformation is not configured to delete row. Not sending the delete through.",
                                    transformation.getTransformId());
                        }
                        break;
                }
            }
        }
    } catch (IgnoreRowException ex) {
        // ignore this row
        if (log.isDebugEnabled()) {
            log.debug("Transform indicated that the target row should be ignored with a target key of: {}",
                    ArrayUtils.toString(data.getKeyValues()));
        }
    }
    return persistData;
}
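The core decision in the column loop above is whether a non-PK transform column applies to the current DML type at all. A stand-alone sketch of that include-on check, with simplified enums standing in for the SymmetricDS types:

public class IncludeOnSketch {
    enum DataEventType { INSERT, UPDATE, DELETE }
    enum IncludeOnType { ALL, INSERT, UPDATE, DELETE }

    // Returns true when the transform column should be evaluated for this event type.
    static boolean applies(IncludeOnType includeOn, DataEventType eventType) {
        return includeOn == IncludeOnType.ALL
                || (includeOn == IncludeOnType.INSERT && eventType == DataEventType.INSERT)
                || (includeOn == IncludeOnType.UPDATE && eventType == DataEventType.UPDATE)
                || (includeOn == IncludeOnType.DELETE && eventType == DataEventType.DELETE);
    }

    public static void main(String[] args) {
        System.out.println(applies(IncludeOnType.UPDATE, DataEventType.DELETE)); // false
        System.out.println(applies(IncludeOnType.ALL, DataEventType.DELETE));    // true
    }
}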
Use of org.jumpmind.symmetric.io.data.DataEventType in project symmetric-ds by JumpMind.
Class TransformWriter, method write.
public void write(CsvData data) {
    DataEventType eventType = data.getDataEventType();
    if (activeTransforms != null && activeTransforms.size() > 0 && isTransformable(eventType)) {
        if (data.requiresTable() && sourceTable == null && context.getLastParsedTable() != null) {
            // if we cross batches and the table isn't specified, then
            // use the last table we used
            start(context.getLastParsedTable());
        }
        long ts = System.currentTimeMillis();
        Map<String, String> sourceValues = data.toColumnNameValuePairs(this.sourceTable.getColumnNames(), CsvData.ROW_DATA);
        Map<String, String> oldSourceValues = null;
        if (data.contains(CsvData.OLD_DATA)) {
            oldSourceValues = data.toColumnNameValuePairs(this.sourceTable.getColumnNames(), CsvData.OLD_DATA);
        }
        Map<String, String> sourceKeyValues = null;
        if (data.contains(CsvData.PK_DATA)) {
            sourceKeyValues = data.toKeyColumnValuePairs(this.sourceTable);
        }
        if (eventType == DataEventType.DELETE) {
            sourceValues = oldSourceValues;
            if (sourceValues == null || sourceValues.size() == 0) {
                sourceValues = sourceKeyValues;
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("{} transformation(s) started because of {} on {}. The original row data was: {}",
                    new Object[] { activeTransforms.size(), eventType.toString(),
                            this.sourceTable.getFullyQualifiedTableName(), sourceValues });
        }
        List<TransformedData> dataThatHasBeenTransformed = new ArrayList<TransformedData>();
        TransformTable[] transformTables = activeTransforms.toArray(new TransformTable[activeTransforms.size()]);
        if (eventType == DataEventType.DELETE) {
            CollectionUtils.reverseArray(transformTables);
        }
        for (TransformTable transformation : transformTables) {
            if (eventType == DataEventType.INSERT && transformation.isUpdateFirst()) {
                eventType = DataEventType.UPDATE;
            }
            dataThatHasBeenTransformed.addAll(
                    transform(eventType, context, transformation, sourceKeyValues, oldSourceValues, sourceValues));
        }
        for (TransformedData transformedData : dataThatHasBeenTransformed) {
            Table transformedTable = transformedData.buildTargetTable();
            CsvData csvData = transformedData.buildTargetCsvData();
            long transformTimeInMs = System.currentTimeMillis() - ts;
            boolean processData = true;
            if (lastTransformedTable == null || !lastTransformedTable.equals(transformedTable)) {
                if (lastTransformedTable != null) {
                    this.nestedWriter.end(lastTransformedTable);
                }
                processData = this.nestedWriter.start(transformedTable);
                if (!processData) {
                    lastTransformedTable = null;
                } else {
                    lastTransformedTable = transformedTable;
                }
            }
            if (processData || !csvData.requiresTable()) {
                this.nestedWriter.write(csvData);
            }
            Statistics stats = this.nestedWriter.getStatistics().get(batch);
            if (stats != null) {
                stats.increment(DataWriterStatisticConstants.TRANSFORMMILLIS, transformTimeInMs);
            }
            ts = System.currentTimeMillis();
        }
    } else {
        if (sourceTable != null) {
            super.start(sourceTable);
        }
        super.write(data);
        if (sourceTable != null) {
            super.end(sourceTable);
        }
    }
}
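For DELETE events this method substitutes the captured old data (or, if none was captured, the primary-key data) as the source values before transforming. A small stand-alone sketch of that fallback, using plain maps in place of CsvData; the method and variable names are illustrative only:

import java.util.LinkedHashMap;
import java.util.Map;

public class DeleteSourceValuesSketch {
    enum DataEventType { INSERT, UPDATE, DELETE }

    static Map<String, String> resolveSourceValues(DataEventType eventType,
            Map<String, String> rowValues, Map<String, String> oldValues, Map<String, String> pkValues) {
        Map<String, String> sourceValues = rowValues;
        if (eventType == DataEventType.DELETE) {
            // Deletes carry no new row data, so transform against the old image ...
            sourceValues = oldValues;
            if (sourceValues == null || sourceValues.isEmpty()) {
                // ... or, failing that, against whatever key columns were captured.
                sourceValues = pkValues;
            }
        }
        return sourceValues;
    }

    public static void main(String[] args) {
        Map<String, String> pk = new LinkedHashMap<>();
        pk.put("id", "7");
        System.out.println(resolveSourceValues(DataEventType.DELETE, null, null, pk)); // {id=7}
    }
}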
Use of org.jumpmind.symmetric.io.data.DataEventType in project symmetric-ds by JumpMind.
Class AbstractDataRouter, method getDataMap.
protected Map<String, String> getDataMap(DataMetaData dataMetaData, ISymmetricDialect symmetricDialect) {
    Map<String, String> data = null;
    DataEventType dml = dataMetaData.getData().getDataEventType();
    switch (dml) {
        case UPDATE:
            data = new LinkedCaseInsensitiveMap<String>(dataMetaData.getTable().getColumnCount() * 4);
            data.putAll(getNewDataAsString(null, dataMetaData, symmetricDialect));
            data.putAll(getOldDataAsString(OLD_, dataMetaData, symmetricDialect));
            break;
        case INSERT:
            data = new LinkedCaseInsensitiveMap<String>(dataMetaData.getTable().getColumnCount() * 4);
            data.putAll(getNewDataAsString(null, dataMetaData, symmetricDialect));
            Map<String, String> map = getNullData(OLD_, dataMetaData);
            data.putAll(map);
            break;
        case DELETE:
            data = new LinkedCaseInsensitiveMap<String>(dataMetaData.getTable().getColumnCount() * 4);
            data.putAll(getOldDataAsString(null, dataMetaData, symmetricDialect));
            data.putAll(getOldDataAsString(OLD_, dataMetaData, symmetricDialect));
            break;
        default:
            data = new LinkedCaseInsensitiveMap<String>(1);
            break;
    }
    if (data != null) {
        if (data.size() == 0) {
            data.putAll(getPkDataAsString(dataMetaData, symmetricDialect));
        }
        data.put("EXTERNAL_DATA", dataMetaData.getData().getExternalData());
    }
    return data;
}
Use of org.jumpmind.symmetric.io.data.DataEventType in project symmetric-ds by JumpMind.
Class MsSqlBulkDatabaseWriter, method write.
public void write(CsvData data) {
    DataEventType dataEventType = data.getDataEventType();
    switch (dataEventType) {
        case INSERT:
            statistics.get(batch).increment(DataWriterStatisticConstants.STATEMENTCOUNT);
            statistics.get(batch).increment(DataWriterStatisticConstants.LINENUMBER);
            statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
            try {
                String[] parsedData = data.getParsedData(CsvData.ROW_DATA);
                if (needsBinaryConversion) {
                    Column[] columns = targetTable.getColumns();
                    for (int i = 0; i < columns.length; i++) {
                        if (columns[i].isOfBinaryType()) {
                            if (batch.getBinaryEncoding().equals(BinaryEncoding.BASE64) && parsedData[i] != null) {
                                parsedData[i] = new String(Hex.encodeHex(Base64.decodeBase64(parsedData[i].getBytes())));
                            }
                        }
                    }
                }
                OutputStream out = this.stagedInputFile.getOutputStream();
                if (needsColumnsReordered) {
                    Map<String, String> mapData = data.toColumnNameValuePairs(targetTable.getColumnNames(), CsvData.ROW_DATA);
                    String[] columnNames = databaseTable.getColumnNames();
                    for (int i = 0; i < columnNames.length; i++) {
                        String columnData = mapData.get(columnNames[i]);
                        if (columnData != null) {
                            out.write(columnData.getBytes());
                        }
                        if (i + 1 < columnNames.length) {
                            out.write(fieldTerminator.getBytes());
                        }
                    }
                } else {
                    for (int i = 0; i < parsedData.length; i++) {
                        if (parsedData[i] != null) {
                            out.write(parsedData[i].getBytes());
                        }
                        if (i + 1 < parsedData.length) {
                            out.write(fieldTerminator.getBytes());
                        }
                    }
                }
                out.write(rowTerminator.getBytes());
                loadedRows++;
            } catch (Exception ex) {
                throw getPlatform().getSqlTemplate().translate(ex);
            } finally {
                statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
            }
            break;
        case UPDATE:
        case DELETE:
        default:
            flush();
            super.write(data);
            break;
    }
    if (loadedRows >= maxRowsBeforeFlush) {
        flush();
    }
}
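The INSERT branch above streams one delimited row into a staging file per call and flushes once loadedRows reaches a threshold. A stand-alone sketch of just the row-writing step, using a ByteArrayOutputStream and made-up terminator constants in place of the staged file and the SymmetricDS settings:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class BulkRowWriterSketch {
    static final String FIELD_TERMINATOR = "|";   // assumed delimiter for this sketch
    static final String ROW_TERMINATOR = "\r\n";  // assumed row terminator for this sketch

    // Writes one row: values separated by the field terminator, nulls written as empty fields.
    static void writeRow(OutputStream out, String[] values) throws IOException {
        for (int i = 0; i < values.length; i++) {
            if (values[i] != null) {
                out.write(values[i].getBytes(StandardCharsets.UTF_8));
            }
            if (i + 1 < values.length) {
                out.write(FIELD_TERMINATOR.getBytes(StandardCharsets.UTF_8));
            }
        }
        out.write(ROW_TERMINATOR.getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        writeRow(buffer, new String[] { "1", null, "abc" });
        // prints "1||abc" followed by the row terminator
        System.out.print(buffer.toString(StandardCharsets.UTF_8));
    }
}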