Use of org.jumpmind.db.sql.SqlException in project symmetric-ds by JumpMind.
The class AbstractJdbcDdlReader, method readColumn.
/**
 * Extracts a column definition from the result set.
 *
 * @param metaData The database meta data
 * @param values The column meta data values as defined by {@link #getColumnsForColumn()}
 * @return The column
 */
protected Column readColumn(DatabaseMetaDataWrapper metaData, Map<String, Object> values) throws SQLException {
    Column column = new Column();
    PlatformColumn platformColumn = new PlatformColumn();
    platformColumn.setName(platform.getName());
    column.setName((String) values.get("COLUMN_NAME"));
    String defaultValue = (String) values.get("COLUMN_DEF");
    if (defaultValue == null) {
        defaultValue = (String) values.get("COLUMN_DEFAULT");
    }
    if (defaultValue != null) {
        defaultValue = defaultValue.trim();
        column.setDefaultValue(defaultValue);
    }
    String typeName = (String) values.get("TYPE_NAME");
    column.setJdbcTypeName(typeName);
    Integer mappedType = mapUnknownJdbcTypeForColumn(values);
    if (mappedType != null) {
        column.setMappedTypeCode(mappedType);
    } else {
        column.setMappedTypeCode((Integer) values.get("DATA_TYPE"));
    }
    column.setJdbcTypeCode((Integer) values.get("DATA_TYPE"));
    column.setPrecisionRadix(((Integer) values.get("NUM_PREC_RADIX")).intValue());
    String columnSize = (String) values.get("COLUMN_SIZE");
    int decimalDigits = ((Integer) values.get("DECIMAL_DIGITS")).intValue();
    try {
        platformColumn.setType(typeName);
        if (isNotBlank(columnSize)) {
            platformColumn.setSize(Integer.parseInt(columnSize));
        }
        platformColumn.setDecimalDigits(decimalDigits);
        column.addPlatformColumn(platformColumn);
    } catch (Exception ex) {
        log.warn("", ex);
    }
    if (columnSize == null) {
        columnSize = (String) _defaultSizes.get(new Integer(column.getMappedTypeCode()));
    }
    // we're setting the size after the precision and radix in case
    // the database prefers to return them in the size value
    column.setSize(columnSize);
    if (decimalDigits != 0) {
        // if there is a scale value, set it after the size (which probably
        // did not contain a scale specification)
        column.setScale(decimalDigits);
    }
    column.setRequired("NO".equalsIgnoreCase(((String) values.get("IS_NULLABLE")).trim()));
    column.setDescription((String) values.get("REMARKS"));
    return column;
}
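For context, the values map consumed by readColumn() mirrors the column metadata that JDBC's DatabaseMetaData.getColumns() returns. Below is a minimal sketch, using plain JDBC rather than the DatabaseMetaDataWrapper, of how such a map could be populated; the in-memory H2 URL and the table name MY_TABLE are placeholders, and COLUMN_SIZE is read with getString() because readColumn() above expects it as a String even though the underlying JDBC column is numeric.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

public class ColumnMetaDataExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; requires the H2 driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            DatabaseMetaData meta = conn.getMetaData();
            try (ResultSet rs = meta.getColumns(null, null, "MY_TABLE", null)) {
                while (rs.next()) {
                    Map<String, Object> values = new HashMap<String, Object>();
                    values.put("COLUMN_NAME", rs.getString("COLUMN_NAME"));
                    values.put("TYPE_NAME", rs.getString("TYPE_NAME"));
                    values.put("DATA_TYPE", rs.getInt("DATA_TYPE"));
                    // readColumn() treats COLUMN_SIZE as a String; getString() converts the numeric value.
                    values.put("COLUMN_SIZE", rs.getString("COLUMN_SIZE"));
                    values.put("DECIMAL_DIGITS", rs.getInt("DECIMAL_DIGITS"));
                    values.put("NUM_PREC_RADIX", rs.getInt("NUM_PREC_RADIX"));
                    values.put("COLUMN_DEF", rs.getString("COLUMN_DEF"));
                    values.put("IS_NULLABLE", rs.getString("IS_NULLABLE"));
                    values.put("REMARKS", rs.getString("REMARKS"));
                    System.out.println(values);
                }
            }
        }
    }
}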
Use of org.jumpmind.db.sql.SqlException in project symmetric-ds by JumpMind.
The class DefaultDatabaseWriter, method delete.
@Override
protected LoadStatus delete(CsvData data, boolean useConflictDetection) {
    try {
        statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
        Conflict conflict = writerSettings.pickConflict(this.targetTable, batch);
        Map<String, String> lookupDataMap = null;
        if (requireNewStatement(DmlType.DELETE, data, useConflictDetection, useConflictDetection, conflict.getDetectType())) {
            this.lastUseConflictDetection = useConflictDetection;
            List<Column> lookupKeys = null;
            if (!useConflictDetection) {
                lookupKeys = targetTable.getPrimaryKeyColumnsAsList();
            } else {
                switch (conflict.getDetectType()) {
                    case USE_OLD_DATA:
                        lookupKeys = targetTable.getColumnsAsList();
                        break;
                    case USE_VERSION:
                    case USE_TIMESTAMP:
                        List<Column> lookupColumns = new ArrayList<Column>();
                        Column versionColumn = targetTable.getColumnWithName(conflict.getDetectExpression());
                        if (versionColumn != null) {
                            lookupColumns.add(versionColumn);
                        } else {
                            log.error("Could not find the timestamp/version column with the name {}. Defaulting to using primary keys for the lookup.", conflict.getDetectExpression());
                        }
                        Column[] pks = targetTable.getPrimaryKeyColumns();
                        for (Column column : pks) {
                            // make sure all of the PK columns are in the list only once
                            // and are always at the end of the list
                            lookupColumns.remove(column);
                            lookupColumns.add(column);
                        }
                        lookupKeys = lookupColumns;
                        break;
                    case USE_PK_DATA:
                    default:
                        lookupKeys = targetTable.getPrimaryKeyColumnsAsList();
                        break;
                }
            }
            if (lookupKeys == null || lookupKeys.size() == 0) {
                lookupKeys = targetTable.getColumnsAsList();
            }
            int lookupKeyCountBeforeColumnRemoval = lookupKeys.size();
            Iterator<Column> it = lookupKeys.iterator();
            while (it.hasNext()) {
                Column col = it.next();
                if ((platform.isLob(col.getMappedTypeCode()) && data.isNoBinaryOldData()) || !platform.canColumnBeUsedInWhereClause(col)) {
                    it.remove();
                }
            }
            if (lookupKeys.size() == 0) {
                String msg = "There are no keys defined for " + targetTable.getFullyQualifiedTableName() + ". Cannot build a delete statement. ";
                if (lookupKeyCountBeforeColumnRemoval > 0) {
                    msg += "The only keys defined are binary and they have been removed.";
                }
                throw new IllegalStateException(msg);
            }
            lookupDataMap = getLookupDataMap(data, conflict);
            boolean[] nullKeyValues = new boolean[lookupKeys.size()];
            for (int i = 0; i < lookupKeys.size(); i++) {
                Column column = lookupKeys.get(i);
                nullKeyValues[i] = !column.isRequired() && lookupDataMap.get(column.getName()) == null;
            }
            this.currentDmlStatement = platform.createDmlStatement(DmlType.DELETE, targetTable.getCatalog(), targetTable.getSchema(), targetTable.getName(), lookupKeys.toArray(new Column[lookupKeys.size()]), null, nullKeyValues, writerSettings.getTextColumnExpression());
            if (log.isDebugEnabled()) {
                log.debug("Preparing dml: " + this.currentDmlStatement.getSql());
            }
            transaction.prepare(this.currentDmlStatement.getSql());
        }
        try {
            lookupDataMap = lookupDataMap == null ? getLookupDataMap(data, conflict) : lookupDataMap;
            long count = execute(data, this.currentDmlStatement.getLookupKeyData(lookupDataMap));
            statistics.get(batch).increment(DataWriterStatisticConstants.DELETECOUNT, count);
            if (count > 0) {
                return LoadStatus.SUCCESS;
            } else {
                // since the delete conflicted, there's no row to delete, so no cur data
                context.put(CUR_DATA, null);
                return LoadStatus.CONFLICT;
            }
        } catch (SqlException ex) {
            if (platform.getSqlTemplate().isUniqueKeyViolation(ex) && !platform.getDatabaseInfo().isRequiresSavePointsInTransaction()) {
                // since the delete conflicted, there's no row to delete, so no cur data
                context.put(CUR_DATA, null);
                return LoadStatus.CONFLICT;
            } else {
                throw ex;
            }
        }
    } catch (SqlException ex) {
        logFailureDetails(ex, data, true);
        throw ex;
    } finally {
        statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
    }
}
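The nullKeyValues array computed above exists because SQL comparisons against NULL with "=" never match; a null lookup value has to be rendered as "IS NULL" in the generated DELETE. The following standalone sketch is not SymmetricDS's DmlStatement, only an illustration of that idea.

import java.util.List;

public class DeleteSqlSketch {
    /**
     * Illustrative only: builds a DELETE statement where columns flagged as
     * possibly null are compared with "is null" instead of "= ?", which is the
     * reason delete() computes nullKeyValues before calling createDmlStatement().
     */
    public static String buildDelete(String table, List<String> keyColumns, boolean[] nullKeyValues) {
        StringBuilder sql = new StringBuilder("delete from ").append(table).append(" where ");
        for (int i = 0; i < keyColumns.size(); i++) {
            if (i > 0) {
                sql.append(" and ");
            }
            sql.append(keyColumns.get(i));
            sql.append(nullKeyValues[i] ? " is null" : " = ?");
        }
        return sql.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildDelete("item", java.util.Arrays.asList("item_id", "store_id"),
                new boolean[] { false, true }));
        // prints: delete from item where item_id = ? and store_id is null
    }
}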
Use of org.jumpmind.db.sql.SqlException in project symmetric-ds by JumpMind.
The class DbExportImportTest, method importSqlData.
@Test
public void importSqlData() throws Exception {
    ISymmetricEngine engine = getSymmetricEngine();
    recreateImportTable();
    assertCountDbImportTableRecords(0);
    DbImport importCsv = new DbImport(engine.getDatabasePlatform());
    importCsv.setFormat(DbImport.Format.SQL);
    importCsv.importTables(getClass().getResourceAsStream("/test-dbimport-1-good.sql"));
    assertCountDbImportTableRecords(5);
    recreateImportTable();
    assertCountDbImportTableRecords(0);
    try {
        importCsv.importTables(getClass().getResourceAsStream("/test-dbimport-1-bad-line-2.sql"));
        Assert.fail("Expected a sql exception");
    } catch (SqlException ex) {
    }
    assertCountDbImportTableRecords(0);
    importCsv.setCommitRate(1);
    importCsv.setForceImport(true);
    importCsv.importTables(getClass().getResourceAsStream("/test-dbimport-1-bad-line-2.sql"));
    assertCountDbImportTableRecords(4);
}
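The same DbImport calls used in the test can be applied outside of a test, for example to load a SQL script while tolerating bad statements. The sketch below reuses only the methods exercised above (setFormat, setCommitRate, setForceImport, importTables); the import package for DbImport is assumed and should be checked against the SymmetricDS version in use.

import java.io.FileInputStream;
import java.io.InputStream;
import org.jumpmind.db.platform.IDatabasePlatform;
// Package assumed; verify against your SymmetricDS version.
import org.jumpmind.symmetric.io.data.DbImport;

public class DbImportSketch {
    // The platform is obtained elsewhere, e.g. engine.getDatabasePlatform().
    public static void importScript(IDatabasePlatform platform, String path) throws Exception {
        DbImport importer = new DbImport(platform);
        importer.setFormat(DbImport.Format.SQL);
        // Commit after every statement and keep going past failures, mirroring the
        // "force import" path exercised at the end of the test above.
        importer.setCommitRate(1);
        importer.setForceImport(true);
        try (InputStream in = new FileInputStream(path)) {
            importer.importTables(in);
        }
    }
}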
Use of org.jumpmind.db.sql.SqlException in project symmetric-ds by JumpMind.
The class AbstractSymmetricEngine, method uninstall.
public synchronized void uninstall() {
    log.warn("Attempting an uninstall of all SymmetricDS database objects from the database");
    stop();
    log.info("Just cleaned {} files in the staging area during the uninstall.", getStagingManager().clean(0));
    try {
        Table table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_TRIGGER_ROUTER));
        if (table != null) {
            List<Grouplet> grouplets = groupletService.getGrouplets(true);
            for (Grouplet grouplet : grouplets) {
                groupletService.deleteGrouplet(grouplet);
            }
            List<TriggerRouter> triggerRouters = triggerRouterService.getTriggerRouters(false, true);
            for (TriggerRouter triggerRouter : triggerRouters) {
                triggerRouterService.deleteTriggerRouter(triggerRouter);
            }
            List<FileTriggerRouter> fileTriggerRouters = fileSyncService.getFileTriggerRouters();
            for (FileTriggerRouter fileTriggerRouter : fileTriggerRouters) {
                fileSyncService.deleteFileTriggerRouter(fileTriggerRouter);
            }
            List<Router> routers = triggerRouterService.getRouters(true);
            for (Router router : routers) {
                triggerRouterService.deleteRouter(router);
            }
        }
        table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_CONFLICT));
        if (table != null) {
            // need to remove all conflicts before we can remove the node group links
            List<ConflictNodeGroupLink> conflicts = dataLoaderService.getConflictSettingsNodeGroupLinks();
            for (ConflictNodeGroupLink conflict : conflicts) {
                dataLoaderService.delete(conflict);
            }
        }
        table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_TRANSFORM_TABLE));
        if (table != null) {
            // need to remove all transforms before we can remove the node group links
            List<TransformTableNodeGroupLink> transforms = transformService.getTransformTables(false);
            for (TransformTableNodeGroupLink transformTable : transforms) {
                transformService.deleteTransformTable(transformTable.getTransformId());
            }
        }
        table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_ROUTER));
        if (table != null) {
            List<Router> objects = triggerRouterService.getRouters(true);
            for (Router router : objects) {
                triggerRouterService.deleteRouter(router);
            }
        }
        table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_CONFLICT));
        if (table != null) {
            List<ConflictNodeGroupLink> objects = dataLoaderService.getConflictSettingsNodeGroupLinks();
            for (ConflictNodeGroupLink obj : objects) {
                dataLoaderService.delete(obj);
            }
        }
        table = platform.readTableFromDatabase(null, null, TableConstants.getTableName(parameterService.getTablePrefix(), TableConstants.SYM_NODE_GROUP_LINK));
        if (table != null) {
            // remove the links so the symmetric table trigger will be removed
            List<NodeGroupLink> links = configurationService.getNodeGroupLinks(false);
            for (NodeGroupLink nodeGroupLink : links) {
                configurationService.deleteNodeGroupLink(nodeGroupLink);
            }
        }
        if (table != null) {
            // this should remove all triggers because we have removed all the trigger configuration
            triggerRouterService.syncTriggers();
        }
    } catch (SqlException ex) {
        log.warn("Error while trying to remove triggers on tables", ex);
    }
    // remove any additional triggers that may remain because they were not in trigger history
    symmetricDialect.cleanupTriggers();
    symmetricDialect.dropTablesAndDatabaseObjects();
    // force cache to be cleared
    nodeService.deleteIdentity();
    parameterService.setDatabaseHasBeenInitialized(false);
    log.warn("Finished uninstalling SymmetricDS database objects from the database");
}
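For reference, uninstall() is typically reached through an embedded engine instance. The sketch below assumes the ClientSymmetricEngine entry point with a Properties constructor and the standard engine property keys; all connection and node settings are placeholders.

import java.util.Properties;
// Entry point assumed for an embedded engine; verify against your SymmetricDS version.
import org.jumpmind.symmetric.ClientSymmetricEngine;

public class UninstallSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Placeholder connection and node settings; use real values for your environment.
        props.setProperty("db.driver", "org.h2.Driver");
        props.setProperty("db.url", "jdbc:h2:file:./demo");
        props.setProperty("db.user", "sa");
        props.setProperty("db.password", "");
        props.setProperty("group.id", "store");
        props.setProperty("external.id", "001");
        props.setProperty("registration.url", "http://localhost:31415/sync/corp");
        ClientSymmetricEngine engine = new ClientSymmetricEngine(props);
        // uninstall() stops the engine, deletes the trigger/router/transform/conflict
        // configuration, removes the triggers, and finally drops the SYM_* tables.
        engine.uninstall();
    }
}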
Use of org.jumpmind.db.sql.SqlException in project symmetric-ds by JumpMind.
The class AbstractSymmetricDialect, method createTrigger.
/*
 * Create the configured trigger. The current catalog will be switched to the
 * source catalog if a source catalog is configured for the table.
 */
public void createTrigger(final StringBuilder sqlBuffer, final DataEventType dml, final Trigger trigger, final TriggerHistory hist, final Channel channel, final String tablePrefix, final Table table) {
    log.info("Creating {} trigger for {}", hist.getTriggerNameForDmlType(dml), table.getFullyQualifiedTableName());
    String previousCatalog = null;
    String sourceCatalogName = table.getCatalog();
    String defaultCatalog = platform.getDefaultCatalog();
    String defaultSchema = platform.getDefaultSchema();
    String triggerSql = triggerTemplate.createTriggerDDL(dml, trigger, hist, channel, tablePrefix, table, defaultCatalog, defaultSchema);
    String postTriggerDml = createPostTriggerDDL(dml, trigger, hist, channel, tablePrefix, table);
    if (parameterService.is(ParameterConstants.AUTO_SYNC_TRIGGERS)) {
        ISqlTransaction transaction = null;
        try {
            transaction = this.platform.getSqlTemplate().startSqlTransaction(platform.getDatabaseInfo().isRequiresAutoCommitForDdl());
            previousCatalog = switchCatalogForTriggerInstall(sourceCatalogName, transaction);
            try {
                log.debug("Running: {}", triggerSql);
                transaction.execute(triggerSql);
            } catch (SqlException ex) {
                log.info("Failed to create trigger: {}", triggerSql);
                throw ex;
            }
            if (StringUtils.isNotBlank(postTriggerDml)) {
                try {
                    transaction.execute(postTriggerDml);
                } catch (SqlException ex) {
                    log.info("Failed to create post trigger: {}", postTriggerDml);
                    throw ex;
                }
            }
            transaction.commit();
        } catch (SqlException ex) {
            transaction.rollback();
            throw ex;
        } finally {
            try {
                if (sourceCatalogName != null && !sourceCatalogName.equalsIgnoreCase(previousCatalog)) {
                    switchCatalogForTriggerInstall(previousCatalog, transaction);
                }
            } finally {
                transaction.close();
            }
        }
    }
    logSql(triggerSql, sqlBuffer);
    logSql(postTriggerDml, sqlBuffer);
}
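The transaction handling in createTrigger() (execute, commit on success, roll back on SqlException, always close) is a reusable pattern. The generic sketch below is restricted to calls that appear in the method above, with null checks added so a failure in startSqlTransaction() does not cause a NullPointerException during cleanup; the runDdl() helper itself is hypothetical.

import org.jumpmind.db.platform.IDatabasePlatform;
import org.jumpmind.db.sql.ISqlTransaction;
import org.jumpmind.db.sql.SqlException;

public class DdlTransactionSketch {
    /**
     * Runs a single DDL statement using the same transaction/SqlException pattern
     * as createTrigger() above: execute, commit on success, roll back on failure,
     * and always close the transaction.
     */
    public static void runDdl(IDatabasePlatform platform, String ddl) {
        ISqlTransaction transaction = null;
        try {
            transaction = platform.getSqlTemplate().startSqlTransaction(
                    platform.getDatabaseInfo().isRequiresAutoCommitForDdl());
            transaction.execute(ddl);
            transaction.commit();
        } catch (SqlException ex) {
            if (transaction != null) {
                transaction.rollback();
            }
            throw ex;
        } finally {
            if (transaction != null) {
                transaction.close();
            }
        }
    }
}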