Example usage of org.jumpmind.db.model.Column in the symmetric-ds project by JumpMind:
class DefaultDatabaseWriter, method lookupTableAtTarget.
/**
 * Resolves the target database's definition of the given source table,
 * memoizing the filtered result per table key so repeated lookups within
 * this writer are cheap.
 *
 * Returns {@code null} when the table cannot be found in the platform cache.
 */
@Override
protected Table lookupTableAtTarget(Table sourceTable) {
    String tableKey = sourceTable.getTableKey();
    Table resolved = targetTables.get(tableKey);
    if (resolved == null) {
        Table cached = platform.getTableFromCache(sourceTable.getCatalog(), sourceTable.getSchema(),
                sourceTable.getName(), false);
        if (cached != null) {
            // Keep only the columns that exist on the source side, optionally
            // adopting the source table's primary key definition.
            resolved = cached.copyAndFilterColumns(sourceTable.getColumnNames(),
                    sourceTable.getPrimaryKeyColumnNames(),
                    this.writerSettings.isUsePrimaryKeysFromSource());
            for (Column column : resolved.getColumns()) {
                if (column != null && this.writerSettings.isTreatDateTimeFieldsAsVarchar()) {
                    // Downgrade temporal columns to VARCHAR when the writer is
                    // configured to treat date/time fields as plain strings.
                    int typeCode = column.getMappedTypeCode();
                    boolean temporal = typeCode == Types.DATE || typeCode == Types.TIME
                            || typeCode == Types.TIMESTAMP;
                    if (temporal) {
                        column.setMappedTypeCode(Types.VARCHAR);
                    }
                }
            }
            targetTables.put(tableKey, resolved);
        }
    }
    return resolved;
}
Example usage of org.jumpmind.db.model.Column in the symmetric-ds project by JumpMind:
class DefaultDatabaseWriter, method delete.
/**
 * Deletes the target row identified by the given change data, optionally
 * letting the configured conflict detection widen the lookup (WHERE)
 * columns beyond the primary key.
 *
 * @param data the change whose pk/old values identify the row to delete
 * @param useConflictDetection whether conflict detection settings should
 *            influence which columns participate in the lookup
 * @return {@code LoadStatus.SUCCESS} if a row was deleted, otherwise
 *         {@code LoadStatus.CONFLICT}
 * @throws IllegalStateException if no usable lookup columns remain after
 *             filtering out LOB/non-WHERE-capable columns
 */
@Override
protected LoadStatus delete(CsvData data, boolean useConflictDetection) {
    try {
        statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
        Conflict conflict = writerSettings.pickConflict(this.targetTable, batch);
        Map<String, String> lookupDataMap = null;
        // NOTE(review): useConflictDetection is passed for two consecutive boolean
        // arguments here -- confirm against the requireNewStatement signature.
        if (requireNewStatement(DmlType.DELETE, data, useConflictDetection, useConflictDetection,
                conflict.getDetectType())) {
            this.lastUseConflictDetection = useConflictDetection;
            // Choose the columns that will make up the WHERE clause.
            List<Column> lookupKeys = null;
            if (!useConflictDetection) {
                lookupKeys = targetTable.getPrimaryKeyColumnsAsList();
            } else {
                switch (conflict.getDetectType()) {
                    case USE_OLD_DATA:
                        // Match on every column's old value.
                        lookupKeys = targetTable.getColumnsAsList();
                        break;
                    case USE_VERSION:
                    case USE_TIMESTAMP:
                        // Match on the version/timestamp column plus the PK.
                        List<Column> lookupColumns = new ArrayList<Column>();
                        Column versionColumn = targetTable.getColumnWithName(conflict.getDetectExpression());
                        if (versionColumn != null) {
                            lookupColumns.add(versionColumn);
                        } else {
                            log.error("Could not find the timestamp/version column with the name {}. Defaulting to using primary keys for the lookup.", conflict.getDetectExpression());
                        }
                        Column[] pks = targetTable.getPrimaryKeyColumns();
                        for (Column column : pks) {
                            // make sure all of the PK keys are in the list
                            // only once and are always at the end of the
                            // list
                            lookupColumns.remove(column);
                            lookupColumns.add(column);
                        }
                        lookupKeys = lookupColumns;
                        break;
                    case USE_PK_DATA:
                    default:
                        lookupKeys = targetTable.getPrimaryKeyColumnsAsList();
                        break;
                }
            }
            if (lookupKeys == null || lookupKeys.size() == 0) {
                // No PK defined: fall back to matching on all columns.
                lookupKeys = targetTable.getColumnsAsList();
            }
            int lookupKeyCountBeforeColumnRemoval = lookupKeys.size();
            // Drop columns that cannot appear in a WHERE clause (e.g. LOBs
            // when no binary old data was captured).
            Iterator<Column> it = lookupKeys.iterator();
            while (it.hasNext()) {
                Column col = it.next();
                if ((platform.isLob(col.getMappedTypeCode()) && data.isNoBinaryOldData())
                        || !platform.canColumnBeUsedInWhereClause(col)) {
                    it.remove();
                }
            }
            if (lookupKeys.size() == 0) {
                // BUGFIX: message previously said "update statement" even though
                // this failure occurs while building a delete statement.
                String msg = "There are no keys defined for " + targetTable.getFullyQualifiedTableName()
                        + ". Cannot build a delete statement. ";
                if (lookupKeyCountBeforeColumnRemoval > 0) {
                    msg += "The only keys defined are binary and they have been removed.";
                }
                throw new IllegalStateException(msg);
            }
            lookupDataMap = getLookupDataMap(data, conflict);
            boolean[] nullKeyValues = new boolean[lookupKeys.size()];
            for (int i = 0; i < lookupKeys.size(); i++) {
                Column column = lookupKeys.get(i);
                // A null value on an optional key column needs "IS NULL"
                // instead of "= ?" in the generated SQL.
                nullKeyValues[i] = !column.isRequired() && lookupDataMap.get(column.getName()) == null;
            }
            this.currentDmlStatement = platform.createDmlStatement(DmlType.DELETE, targetTable.getCatalog(),
                    targetTable.getSchema(), targetTable.getName(),
                    lookupKeys.toArray(new Column[lookupKeys.size()]), null, nullKeyValues,
                    writerSettings.getTextColumnExpression());
            if (log.isDebugEnabled()) {
                log.debug("Preparing dml: " + this.currentDmlStatement.getSql());
            }
            transaction.prepare(this.currentDmlStatement.getSql());
        }
        try {
            lookupDataMap = lookupDataMap == null ? getLookupDataMap(data, conflict) : lookupDataMap;
            long count = execute(data, this.currentDmlStatement.getLookupKeyData(lookupDataMap));
            statistics.get(batch).increment(DataWriterStatisticConstants.DELETECOUNT, count);
            if (count > 0) {
                return LoadStatus.SUCCESS;
            } else {
                // since a delete conflicted, there's no row to delete, so no cur data.
                context.put(CUR_DATA, null);
                return LoadStatus.CONFLICT;
            }
        } catch (SqlException ex) {
            if (platform.getSqlTemplate().isUniqueKeyViolation(ex)
                    && !platform.getDatabaseInfo().isRequiresSavePointsInTransaction()) {
                // since a delete conflicted, there's no row to delete, so no cur data.
                context.put(CUR_DATA, null);
                return LoadStatus.CONFLICT;
            } else {
                throw ex;
            }
        }
    } catch (SqlException ex) {
        logFailureDetails(ex, data, true);
        throw ex;
    } finally {
        statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
    }
}
Example usage of org.jumpmind.db.model.Column in the symmetric-ds project by JumpMind:
class XmlDataReader, method readNext.
/**
 * Advances through the XML pull-parser stream and converts the elements it
 * encounters (database, table, table_data, row, field) into Batch, Table and
 * CsvData objects which are queued onto {@code next} for the reader to emit.
 */
protected void readNext() {
    try {
        // Accumulates column name -> value pairs for the row currently being parsed.
        Map<String, String> rowData = new LinkedHashMap<String, String>();
        String columnName = null;
        CsvData data = null;
        Table table = null;
        String catalog = null;
        String schema = null;
        int eventType = parser.next();
        while (eventType != XmlPullParser.END_DOCUMENT) {
            switch(eventType) {
                case XmlPullParser.TEXT:
                    // Text content belongs to the most recently opened <field>.
                    if (columnName != null) {
                        rowData.put(columnName, parser.getText());
                        columnName = null;
                    }
                    break;
                case XmlPullParser.START_TAG:
                    String name = parser.getName();
                    if ("row".equalsIgnoreCase(name)) {
                        // Begin a new insert row; columns are rebuilt from the
                        // row's fields, so clear any previous definition.
                        data = new CsvData();
                        if (table != null) {
                            table.removeAllColumns();
                        }
                        data.setDataEventType(DataEventType.INSERT);
                    } else if ("field".equalsIgnoreCase(name)) {
                        boolean nullValue = false;
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("name".equalsIgnoreCase(attributeName)) {
                                columnName = attributeValue;
                            } else if ("xsi:nil".equalsIgnoreCase(attributeName)) {
                                nullValue = true;
                            }
                        }
                        // xsi:nil fields produce no TEXT event, so record the
                        // null immediately.
                        if (nullValue) {
                            rowData.put(columnName, null);
                            columnName = null;
                        }
                    } else if ("table_data".equalsIgnoreCase(name)) {
                        // Data-only section: a bare table carrying just a name.
                        Batch batch = new Batch();
                        batch.setBinaryEncoding(BinaryEncoding.BASE64);
                        next.add(batch);
                        table = new Table();
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("name".equalsIgnoreCase(attributeName)) {
                                table.setName(attributeValue);
                            }
                        }
                        next.add(table);
                    } else if ("table".equalsIgnoreCase(name)) {
                        // Full table definition: parse it and emit a CREATE
                        // event carrying the table's XML schema.
                        Batch batch = new Batch();
                        batch.setBinaryEncoding(BinaryEncoding.BASE64);
                        next.add(batch);
                        table = DatabaseXmlUtil.nextTable(parser);
                        next.add(table);
                        Database db = new Database();
                        db.setName("dbimport");
                        db.setCatalog(catalog);
                        db.setSchema(schema);
                        db.addTable(table);
                        String xml = DatabaseXmlUtil.toXml(db);
                        data = new CsvData(DataEventType.CREATE);
                        data.putCsvData(CsvData.ROW_DATA, CsvUtils.escapeCsvData(xml));
                        next.add(data);
                    } else if ("database".equalsIgnoreCase(name)) {
                        // Remember catalog/schema for tables parsed later.
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("catalog".equalsIgnoreCase(attributeName)) {
                                catalog = attributeValue;
                            } else if ("schema".equalsIgnoreCase(attributeName)) {
                                schema = attributeValue;
                            }
                        }
                    }
                    break;
                case XmlPullParser.END_TAG:
                    name = parser.getName();
                    if ("row".equalsIgnoreCase(name)) {
                        // Row complete: derive the column list from the field
                        // names seen, then emit table (if changed) and data.
                        String[] columnNames = rowData.keySet().toArray(new String[rowData.keySet().size()]);
                        for (String colName : columnNames) {
                            table.addColumn(new Column(colName));
                        }
                        String[] columnValues = rowData.values().toArray(new String[rowData.values().size()]);
                        data.putParsedData(CsvData.ROW_DATA, columnValues);
                        if (this.table == null || !this.table.equals(table)) {
                            next.add(table);
                        }
                        next.add(data);
                        rowData = new LinkedHashMap<String, String>();
                    } else if ("table_data".equalsIgnoreCase(name)) {
                        // NOTE(review): "batch" here does not refer to the locals
                        // declared in the START_TAG branches -- presumably a field
                        // on this reader; confirm against the full class.
                        if (batch != null) {
                            batch.setComplete(true);
                        }
                    } else if ("field".equalsIgnoreCase(name)) {
                        columnName = null;
                    }
                    break;
            }
            eventType = parser.next();
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    } catch (XmlPullParserException ex) {
        throw new RuntimeException(ex);
    }
}
Example usage of org.jumpmind.db.model.Column in the symmetric-ds project by JumpMind:
class AdditiveColumnTransform, method transform.
/**
 * Applies an additive transform: instead of overwriting the target column, it
 * issues an UPDATE that adds the delta (new - old, scaled by the transform
 * expression) to the current value of the target column.
 *
 * @param column the transform configuration; its expression, if non-blank, is
 *            parsed as a BigDecimal multiplier for the delta
 * @return the computed delta as a string (also applied via direct SQL)
 * @throws IgnoreColumnException if the target table/column is missing, or if
 *             the direct UPDATE succeeded (so the normal write must skip this
 *             column)
 */
public String transform(IDatabasePlatform platform, DataContext context, TransformColumn column, TransformedData data, Map<String, String> sourceValues, String newValue, String oldValue) throws IgnoreColumnException, IgnoreRowException {
    BigDecimal multiplier = new BigDecimal(1.00);
    if (StringUtils.isNotBlank(column.getTransformExpression())) {
        multiplier = new BigDecimal(column.getTransformExpression());
    }
    Table table = platform.getTableFromCache(data.getCatalogName(), data.getSchemaName(), data.getTableName(), false);
    if (table == null) {
        if (log.isDebugEnabled()) {
            log.debug("Could not find the target table {}", data.getFullyQualifiedTableName());
        }
        throw new IgnoreColumnException();
    } else if (table.getColumnWithName(column.getTargetColumnName()) == null) {
        if (log.isDebugEnabled()) {
            log.debug("Could not find the target column {}", column.getTargetColumnName());
        }
        throw new IgnoreColumnException();
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Old, new, transform expression as received: " + oldValue + ", " + newValue + ", " + column.getTransformExpression());
        }
        // Blank values and deletes contribute 0 to the delta.
        if (!StringUtils.isNotBlank(newValue) || data.getSourceDmlType() == DataEventType.DELETE) {
            newValue = "0";
        }
        if (!StringUtils.isNotBlank(oldValue)) {
            oldValue = "0";
        }
        // delta = (new - old) * multiplier
        BigDecimal delta = new BigDecimal(newValue);
        delta = delta.subtract(new BigDecimal(oldValue));
        delta = delta.multiply(multiplier);
        newValue = delta.toString();
        String quote = platform.getDdlBuilder().isDelimitedIdentifierModeOn() ? platform.getDatabaseInfo().getDelimiterToken() : "";
        // NOTE(review): schema is passed before catalog here -- verify this matches
        // the getFullyQualifiedTableName(platform, schema, catalog, table) signature.
        StringBuilder sql = new StringBuilder(String.format("update %s set %s=%s+(%s) where ", getFullyQualifiedTableName(platform, data.getSchemaName(), data.getCatalogName(), data.getTableName()), quote + column.getTargetColumnName() + quote, quote + column.getTargetColumnName() + quote, newValue));
        String[] keyNames = data.getKeyNames();
        List<Column> columns = new ArrayList<Column>();
        List<String> keyValuesList = new ArrayList<String>();
        boolean addedFirstKey = false;
        // Build the WHERE clause from the key columns that exist on the target.
        for (int i = 0; i < keyNames.length; i++) {
            Column targetCol = table.getColumnWithName(keyNames[i]);
            if (targetCol != null) {
                columns.add(targetCol);
                keyValuesList.add(sourceValues.get(keyNames[i]));
                if (addedFirstKey) {
                    sql.append("and ");
                } else {
                    addedFirstKey = true;
                }
                sql.append(quote);
                sql.append(keyNames[i]);
                sql.append(quote);
                sql.append("=? ");
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("SQL: " + sql);
        }
        ISqlTransaction transaction = context.findTransaction();
        // If at least one row was updated directly, the downstream writer must
        // not also set this column -- signal that by ignoring it.
        if (0 < transaction.prepareAndExecute(sql.toString(), platform.getObjectValues(context.getBatch().getBinaryEncoding(), keyValuesList.toArray(new String[keyValuesList.size()]), columns.toArray(new Column[columns.size()])))) {
            throw new IgnoreColumnException();
        }
    }
    return newValue;
}
Example usage of org.jumpmind.db.model.Column in the symmetric-ds project by JumpMind:
class DefaultDatabaseWriterConflictResolver, method isTimestampNewer.
/**
 * Decides whether the incoming row's timestamp/version column is newer than
 * the value already stored in the target database.
 *
 * @return true if there is no existing row, or the loading timestamp is
 *         strictly greater than the existing one
 */
protected boolean isTimestampNewer(Conflict conflict, AbstractDatabaseWriter writer, CsvData data) {
    DefaultDatabaseWriter dbWriter = (DefaultDatabaseWriter) writer;
    IDatabasePlatform platform = dbWriter.getPlatform();
    String detectColumnName = conflict.getDetectExpression();
    Table targetTable = writer.getTargetTable();
    Table sourceTable = writer.getSourceTable();
    String[] pkData = data.getPkData(targetTable);
    Object[] pkValues = platform.getObjectValues(writer.getBatch().getBinaryEncoding(), pkData,
            targetTable.getPrimaryKeyColumns());
    DmlStatement fromStmt = platform.createDmlStatement(DmlType.FROM, targetTable,
            writer.getWriterSettings().getTextColumnExpression());
    Column detectColumn = targetTable.getColumnWithName(detectColumnName);
    if (detectColumn == null) {
        throw new RuntimeException(String.format("Could not find a timestamp column with a name of %s on the table %s. Please check your conflict resolution configuration", detectColumnName, targetTable.getQualifiedTableName()));
    }
    String selectSql = fromStmt.getColumnsSql(new Column[] { detectColumn });
    Map<String, String> newData = data.toColumnNameValuePairs(sourceTable.getColumnNames(), CsvData.ROW_DATA);
    // NOTE(review): loadingStr may be null if the detect column is absent from
    // the row data -- confirm callers guarantee it is present.
    String loadingStr = newData.get(detectColumnName);
    Date loadingTs = null;
    Date existingTs = null;
    if (detectColumn.isTimestampWithTimezone()) {
        // Timestamps with time zone come back as strings of the form
        // "<timestamp> <zone>"; split on the last space to parse each part.
        String existingStr = dbWriter.getTransaction().queryForObject(selectSql, String.class, pkValues);
        // A null result means the row doesn't exist, so existingTs stays null.
        if (existingStr != null) {
            int sep = existingStr.lastIndexOf(" ");
            existingTs = FormatUtils.parseDate(existingStr.substring(0, sep).trim(),
                    FormatUtils.TIMESTAMP_PATTERNS,
                    TimeZone.getTimeZone(existingStr.substring(sep).trim()));
        }
        int sep = loadingStr.lastIndexOf(" ");
        loadingTs = FormatUtils.parseDate(loadingStr.substring(0, sep).trim(),
                FormatUtils.TIMESTAMP_PATTERNS,
                TimeZone.getTimeZone(loadingStr.substring(sep).trim()));
    } else {
        // Plain timestamp: query the existing value directly as a Timestamp.
        existingTs = dbWriter.getTransaction().queryForObject(selectSql, Timestamp.class, pkValues);
        // Convert the incoming string via the platform's type mapping.
        Object[] converted = platform.getObjectValues(writer.getBatch().getBinaryEncoding(),
                new String[] { loadingStr }, new Column[] { detectColumn });
        if (converted[0] instanceof Date) {
            loadingTs = (Date) converted[0];
        } else if (converted[0] instanceof String
                && detectColumn.getJdbcTypeName().equalsIgnoreCase(TypeMap.DATETIME2)) {
            // SQL Server DateTime2 type is treated as a string internally.
            loadingTs = platform.parseDate(Types.VARCHAR, (String) converted[0], false);
        } else {
            throw new ParseException("Could not parse " + detectColumnName + " with a value of " + loadingStr + " for purposes of conflict detection");
        }
    }
    return existingTs == null || loadingTs.compareTo(existingTs) > 0;
}
Aggregations