
Example 11 with DfJDBCException

Use of org.dbflute.exception.DfJDBCException in project dbflute-core by dbflute.

The class DfCraftDiffAssertProvider, method createTableEqualsAssertHandler.

protected DfCraftDiffAssertHandler createTableEqualsAssertHandler(final String craftTitle, final String tableHint, final String exceptExp) {
    return new DfCraftDiffAssertHandler(_craftMetaDir, _nextDirection, craftTitle) {

        @Override
        protected List<Map<String, String>> selectDiffDataList(File sqlFile, Statement st, String sql) throws SQLException {
            final List<Map<String, String>> unifiedList = new ArrayList<Map<String, String>>();
            final Map<String, String> tableSqlMap = toTableSqlMap(tableHint, exceptExp);
            for (DfTableMeta tableMeta : _tableList) {
                final String tableSql = tableSqlMap.get(tableMeta.getTableName());
                if (tableSql == null) {
                    continue;
                }
                final List<Map<String, String>> selectedList = super.selectDiffDataList(sqlFile, st, tableSql);
                final List<DfColumnMeta> columnMetaList = tableMeta.getLazyColumnMetaList();
                if (columnMetaList == null) {
                    String msg = "Not found the column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                if (columnMetaList.isEmpty()) {
                    String msg = "Empty column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                // first column should be PK
                final DfColumnMeta pkCol = columnMetaList.get(0);
                final String pkName = pkCol.getColumnName();
                for (Map<String, String> recordMap : selectedList) {
                    final String pkValue = recordMap.remove(pkName);
                    final Map<String, String> adjustedMap = StringKeyMap.createAsFlexibleOrdered();
                    final String uniqueCode = tableMeta.getTableName() + "::" + pkValue;
                    adjustedMap.put(TABLE_EQUALS_UNIQUE_NAME, uniqueCode);
                    final StringBuilder valueSb = new StringBuilder();
                    int columnIndex = 0;
                    for (Entry<String, String> entry : recordMap.entrySet()) {
                        // loop over the remaining (non-PK) columns; the PK was removed above
                        if (columnIndex > 0) {
                            valueSb.append("|");
                        }
                        final String columnValue = entry.getValue();
                        valueSb.append(columnValue);
                        ++columnIndex;
                    }
                    adjustedMap.put(TABLE_EQUALS_DATA_NAME, valueSb.toString());
                    unifiedList.add(adjustedMap);
                }
            }
            return unifiedList;
        }

        @Override
        protected void handleSQLException(SQLException e, String sql) throws SQLException {
            String msg = "Failed to execute the SQL for CraftDiff.";
            msg = msg + ln() + "The SQL has been switched so see the SQL bellow:";
            msg = msg + ln() + sql;
            throw new DfJDBCException(msg, e);
        }
    };
}
Also used: DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), SQLException (java.sql.SQLException), Statement (java.sql.Statement), ArrayList (java.util.ArrayList), DfJDBCException (org.dbflute.exception.DfJDBCException), DfTableMeta (org.dbflute.logic.jdbc.metadata.info.DfTableMeta), File (java.io.File), HashMap (java.util.HashMap), Map (java.util.Map), StringKeyMap (org.dbflute.helper.StringKeyMap)
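
The handler above flattens each selected record into just two synthetic columns: a unique code built as "TABLE_NAME::pkValue" and a single pipe-joined string of the remaining column values, so rows from any table can be compared uniformly. Below is a minimal, self-contained sketch of that flattening step; the class name, map keys, and sample data are illustrative only, not part of dbflute-core.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;

public class RowFlattenSketch {

    // Flatten one record into {UNIQUE_CODE, DATA}, mirroring the loop above.
    static Map<String, String> flatten(String tableName, String pkName, Map<String, String> record) {
        final Map<String, String> rest = new LinkedHashMap<String, String>(record); // keep column order
        final String pkValue = rest.remove(pkName); // the PK goes into the unique code, not the data
        final StringJoiner valueSb = new StringJoiner("|");
        for (String columnValue : rest.values()) {
            valueSb.add(String.valueOf(columnValue));
        }
        final Map<String, String> adjustedMap = new LinkedHashMap<String, String>();
        adjustedMap.put("UNIQUE_CODE", tableName + "::" + pkValue);
        adjustedMap.put("DATA", valueSb.toString());
        return adjustedMap;
    }

    public static void main(String[] args) {
        final Map<String, String> record = new LinkedHashMap<String, String>();
        record.put("MEMBER_ID", "3");
        record.put("MEMBER_NAME", "Stojkovic");
        record.put("MEMBER_STATUS_CODE", "FML");
        // prints: {UNIQUE_CODE=MEMBER::3, DATA=Stojkovic|FML}
        System.out.println(flatten("MEMBER", "MEMBER_ID", record));
    }
}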

Example 12 with DfJDBCException

Use of org.dbflute.exception.DfJDBCException in project dbflute-core by dbflute.

The class DfAbsractDataWriter, method processNull.

// ===================================================================================
// Process Binding
// ===============
// -----------------------------------------------------
// Null Value
// ----------
protected boolean processNull(String dataDirectory, String tableName, String columnName, Object value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (!isNullValue(value)) {
        return false;
    }
    Map<String, Integer> cacheMap = _nullTypeCacheMap.get(tableName);
    if (cacheMap == null) {
        cacheMap = StringKeyMap.createAsFlexibleOrdered();
        _nullTypeCacheMap.put(tableName, cacheMap);
    }
    final Integer cachedType = cacheMap.get(columnName);
    if (cachedType != null) {
        // cache hit: basically no exception here
        ps.setNull(bindCount, cachedType);
        return true;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo != null) {
        // use mapped type at first
        final String mappedJdbcType = _columnHandler.getColumnJdbcType(columnInfo);
        final Integer mappedJdbcDefValue = TypeMap.getJdbcDefValueByJdbcType(mappedJdbcType);
        try {
            ps.setNull(bindCount, mappedJdbcDefValue);
            cacheMap.put(columnName, mappedJdbcDefValue);
        } catch (SQLException e) {
            // retry by plain type
            final int plainJdbcDefValue = columnInfo.getJdbcDefValue();
            try {
                ps.setNull(bindCount, plainJdbcDefValue);
                cacheMap.put(columnName, plainJdbcDefValue);
            } catch (SQLException ignored) {
                final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
                br.addNotice("Failed to execute setNull(bindCount, jdbcDefValue).");
                br.addItem("Column");
                br.addElement(tableName + "." + columnName);
                br.addElement(columnInfo.toString());
                br.addItem("Mapped JDBC Type");
                br.addElement(mappedJdbcType);
                br.addItem("First JDBC Def-Value");
                br.addElement(mappedJdbcDefValue);
                br.addItem("Retry JDBC Def-Value");
                br.addElement(plainJdbcDefValue);
                br.addItem("Retry Message");
                br.addElement(ignored.getMessage());
                String msg = br.buildExceptionMessage();
                throw new DfJDBCException(msg, e);
            }
        }
    } else {
        // basically no way: no column meta found, so try default types in order
        Integer tryType = Types.VARCHAR;
        try {
            ps.setNull(bindCount, tryType);
            cacheMap.put(columnName, tryType);
        } catch (SQLException e) {
            tryType = Types.NUMERIC;
            try {
                ps.setNull(bindCount, tryType);
                cacheMap.put(columnName, tryType);
            } catch (SQLException ignored) {
                tryType = Types.TIMESTAMP;
                try {
                    ps.setNull(bindCount, tryType);
                    cacheMap.put(columnName, tryType);
                } catch (SQLException iignored) {
                    tryType = Types.OTHER;
                    try {
                        // last try
                        ps.setNull(bindCount, tryType);
                        cacheMap.put(columnName, tryType);
                    } catch (SQLException iiignored) {
                        throw e;
                    }
                }
            }
        }
    }
    return true;
}
Also used: DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), DfJDBCException (org.dbflute.exception.DfJDBCException), SQLException (java.sql.SQLException), ExceptionMessageBuilder (org.dbflute.helper.message.ExceptionMessageBuilder)
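
The essence of processNull() is a cached fallback chain: bind NULL with the mapped JDBC type first, fall back to the column's plain type, and, when no column meta exists, try VARCHAR, NUMERIC, TIMESTAMP, and OTHER in order, caching whichever type code succeeds. A hedged, standalone sketch of that idea follows; the class and method names are illustrative and not dbflute-core API.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NullBindSketch {

    private static final int[] FALLBACK_TYPES = { Types.VARCHAR, Types.NUMERIC, Types.TIMESTAMP, Types.OTHER };

    private final Map<String, Integer> typeCache = new ConcurrentHashMap<String, Integer>();

    void bindNull(PreparedStatement ps, int bindCount, String columnKey) throws SQLException {
        final Integer cachedType = typeCache.get(columnKey);
        if (cachedType != null) { // cache hit: basically no exception afterwards
            ps.setNull(bindCount, cachedType);
            return;
        }
        SQLException first = null;
        for (int tryType : FALLBACK_TYPES) {
            try {
                ps.setNull(bindCount, tryType);
                typeCache.put(columnKey, tryType); // remember the type that worked
                return;
            } catch (SQLException e) {
                if (first == null) {
                    first = e; // keep the first failure as the representative cause
                }
            }
        }
        throw first; // every candidate failed, as in the original's final "throw e"
    }
}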

Example 13 with DfJDBCException

Use of org.dbflute.exception.DfJDBCException in project dbflute-core by dbflute.

The class DfDelimiterDataWriterImpl, method writeData.

// ===================================================================================
// Write
// =====
public void writeData(DfDelimiterDataResultInfo resultInfo) throws IOException {
    _log.info("/= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ");
    _log.info("writeData(" + _fileName + ")");
    _log.info("= = = = = = =/");
    FileInputStream fis = null;
    InputStreamReader ir = null;
    BufferedReader br = null;
    final String dataDirectory = Srl.substringLastFront(_fileName, "/");
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final String tableDbName = extractTableDbName();
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(_fileName, tableDbName);
    }
    // process before handling table
    beforeHandlingTable(tableDbName, columnMetaMap);
    String lineString = null;
    String preContinueString = null;
    String executedSql = null;
    final List<String> columnNameList = new ArrayList<String>();
    final List<String> valueList = new ArrayList<String>();
    final boolean canBatchUpdate = !isMergedSuppressBatchUpdate(dataDirectory);
    final File dataFile = new File(_fileName);
    Connection conn = null;
    PreparedStatement ps = null;
    try {
        fis = new FileInputStream(dataFile);
        ir = new InputStreamReader(fis, _encoding);
        br = new BufferedReader(ir);
        FirstLineInfo firstLineInfo = null;
        int loopIndex = -1;
        int rowNumber = 0;
        int addedBatchSize = 0;
        while (true) {
            ++loopIndex;
            lineString = br.readLine();
            if (lineString == null) {
                break;
            }
            // /- - - - - - - - - - - - - - -
            // handle the first line (column header)
            // - - - - - - - - - -/
            if (loopIndex == 0) {
                firstLineInfo = analyzeFirstLineInfo(_delimiter, lineString);
                setupColumnNameList(dataDirectory, dataFile, tableDbName, columnMetaMap, firstLineInfo, columnNameList);
                continue;
            }
            // /- - - - - - - - - - - - - - -
            // analyze values in line strings
            // - - - - - - - - - -/
            lineString = filterLineString(lineString);
            {
                if (preContinueString != null && !preContinueString.equals("")) {
                    lineString = preContinueString + "\n" + lineString;
                }
                final ValueLineInfo valueLineInfo = arrangeValueList(lineString, _delimiter);
                // empty string resolved later
                final List<String> ls = valueLineInfo.getValueList();
                if (valueLineInfo.isContinueNextLine()) {
                    preContinueString = ls.remove(ls.size() - 1);
                    valueList.addAll(ls);
                    continue;
                }
                valueList.addAll(ls);
            }
            // /- - - - - - - - - - - - - - -
            // check the column value count
            // - - - - - - - - - -/
            if (isDifferentColumnValueCount(firstLineInfo, valueList)) {
                handleDifferentColumnValueCount(resultInfo, dataDirectory, tableDbName, firstLineInfo, valueList);
                // clear temporary variables
                valueList.clear();
                preContinueString = null;
                continue;
            }
            // *valid record is prepared here
            ++rowNumber;
            // /- - - - - - - - - - - - - - - -
            // process registration to database
            // - - - - - - - - - -/
            final DfDelimiterDataWriteSqlBuilder sqlBuilder = createSqlBuilder(resultInfo, tableDbName, columnMetaMap, columnNameList, valueList);
            if (conn == null) {
                conn = _dataSource.getConnection();
            }
            if (ps == null) {
                // for performance (suppress implicit transaction per SQL)
                beginTransaction(conn);
                executedSql = sqlBuilder.buildSql();
                ps = prepareStatement(conn, executedSql);
            }
            final Map<String, Object> columnValueMap = sqlBuilder.setupParameter();
            final Set<String> sysdateColumnSet = sqlBuilder.getSysdateColumnSet();
            resolveRelativeDate(dataDirectory, tableDbName, columnValueMap, columnMetaMap, sysdateColumnSet, rowNumber);
            handleLoggingInsert(tableDbName, columnValueMap, loggingInsertType, rowNumber);
            int bindCount = 1;
            for (Entry<String, Object> entry : columnValueMap.entrySet()) {
                final String columnName = entry.getKey();
                final Object obj = entry.getValue();
                // /- - - - - - - - - - - - - - -
                // process the column value
                // - - - - - - - - - -/
                if (processNull(dataDirectory, tableDbName, columnName, obj, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // register the value to the statement according to its type
                if (processNotNullNotString(dataDirectory, tableDbName, columnName, obj, conn, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // /- - - - - - - - - - - - - - - - - -
                // process NotNull and StringExpression
                // - - - - - - - - - -/
                final String value = (String) obj;
                processNotNullString(dataDirectory, dataFile, tableDbName, columnName, value, conn, ps, bindCount, columnMetaMap, rowNumber);
                bindCount++;
            }
            if (canBatchUpdate) {
                // mainly here
                ps.addBatch();
            } else {
                ps.execute();
            }
            ++addedBatchSize;
            if (isBatchSizeLimit(addedBatchSize)) {
                // transaction scope
                if (canBatchUpdate) {
                    // mainly here
                    // this is supported in only delimiter data writer
                    // because delimiter data can treat large data
                    // to avoid OutOfMemory
                    ps.executeBatch();
                }
                commitTransaction(conn);
                addedBatchSize = 0;
                close(ps);
                ps = null;
            }
            // *one record is finished here
            // clear temporary variables
            // if an exception occurs from execute() or addBatch(),
            // this valueList serves as debug information
            valueList.clear();
            preContinueString = null;
        }
        if (ps != null && addedBatchSize > 0) {
            if (canBatchUpdate) {
                // mainly here
                ps.executeBatch();
            }
            commitTransaction(conn);
        }
        noticeLoadedRowSize(tableDbName, rowNumber);
        resultInfo.registerLoadedMeta(dataDirectory, _fileName, rowNumber);
        checkImplicitClassification(dataFile, tableDbName, columnNameList);
    } catch (FileNotFoundException e) {
        throw e;
    } catch (IOException e) {
        throw e;
    } catch (SQLException e) {
        DfJDBCException wrapped = DfJDBCException.voice(e);
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, wrapped);
        throw new DfDelimiterDataRegistrationFailureException(msg, wrapped.getNextException());
    } catch (RuntimeException e) {
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, null);
        throw new DfDelimiterDataRegistrationFailureException(msg, e);
    } finally {
        closeStream(fis, ir, br);
        commitJustInCase(conn);
        close(ps);
        close(conn);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used: LoggingInsertType (org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType), SQLException (java.sql.SQLException), ArrayList (java.util.ArrayList), FileNotFoundException (java.io.FileNotFoundException), DfJDBCException (org.dbflute.exception.DfJDBCException), List (java.util.List), DfDelimiterDataRegistrationFailureException (org.dbflute.exception.DfDelimiterDataRegistrationFailureException), DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), InputStreamReader (java.io.InputStreamReader), Connection (java.sql.Connection), PreparedStatement (java.sql.PreparedStatement), IOException (java.io.IOException), FileInputStream (java.io.FileInputStream), BufferedReader (java.io.BufferedReader), File (java.io.File)
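
Stripped of the delimiter parsing, writeData() follows a common chunked-batch pattern: turn off auto-commit, addBatch() per record, and when the batch size limit is reached, executeBatch() and commit so large files cannot exhaust memory (the original additionally closes and re-prepares the PreparedStatement per chunk). A condensed sketch of just that pattern, with a made-up table and an illustrative batch limit:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;

public class ChunkedBatchInsertSketch {

    private static final int BATCH_LIMIT = 10000; // illustrative; dbflute derives its own limit

    void insertAll(DataSource dataSource, List<String> names) throws SQLException {
        try (Connection conn = dataSource.getConnection()) {
            conn.setAutoCommit(false); // suppress implicit transaction per SQL
            try (PreparedStatement ps = conn.prepareStatement("insert into MEMBER (MEMBER_NAME) values (?)")) {
                int addedBatchSize = 0;
                for (String name : names) {
                    ps.setString(1, name);
                    ps.addBatch();
                    if (++addedBatchSize >= BATCH_LIMIT) {
                        ps.executeBatch(); // flush the chunk to avoid OutOfMemory on large data
                        conn.commit();
                        addedBatchSize = 0;
                    }
                }
                if (addedBatchSize > 0) { // flush the remainder
                    ps.executeBatch();
                    conn.commit();
                }
            }
        }
    }
}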

Example 14 with DfJDBCException

Use of org.dbflute.exception.DfJDBCException in project dbflute-core by dbflute.

The class DfDBFluteTaskUtil, method buildSQLExceptionMessage.

protected static void buildSQLExceptionMessage(ExceptionMessageBuilder br, SQLException e) {
    final String sqlState = DfJDBCException.extractSQLState(e);
    br.addItem("SQLState");
    br.addElement(sqlState);
    final Integer errorCode = DfJDBCException.extractErrorCode(e);
    br.addItem("ErrorCode");
    br.addElement(errorCode);
    br.addItem("SQLException");
    br.addElement(e.getClass().getName());
    if (e instanceof DfJDBCException) {
        br.addElement("*Look at the message on the stack trace");
    } else {
        br.addElement(DfJDBCException.extractMessage(e));
    }
    final SQLException nextEx = e.getNextException();
    if (nextEx != null) {
        br.addItem("NextException");
        br.addElement(nextEx.getClass().getName());
        br.addElement(DfJDBCException.extractMessage(nextEx));
        final SQLException nextNextEx = nextEx.getNextException();
        if (nextNextEx != null) {
            br.addItem("NextNextException");
            br.addElement(nextNextEx.getClass().getName());
            br.addElement(DfJDBCException.extractMessage(nextNextEx));
        }
    }
}
Also used: DfJDBCException (org.dbflute.exception.DfJDBCException), SQLException (java.sql.SQLException)
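
Since a SQLException chain can be longer than the two links the helper above inspects, a generic traversal simply follows getNextException() until it returns null. A small sketch of that loop (the class and method names are illustrative, not dbflute API):

import java.sql.SQLException;

public class SqlExceptionChainSketch {

    // Print every link of the chain: class, SQLState, error code, and message.
    static void dumpChain(SQLException topEx) {
        int depth = 0;
        for (SQLException cur = topEx; cur != null; cur = cur.getNextException()) {
            System.out.println("[" + depth + "] " + cur.getClass().getName()
                    + " SQLState=" + cur.getSQLState()
                    + " ErrorCode=" + cur.getErrorCode()
                    + " Message=" + cur.getMessage());
            ++depth;
        }
    }
}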

Example 15 with DfJDBCException

Use of org.dbflute.exception.DfJDBCException in project dbflute-core by dbflute.

The class DfDataSourceHandler, method createConnection.

protected Connection createConnection() throws SQLException {
    final Driver driverInstance = newDriver();
    final Properties info = prepareConnectionProperties();
    Connection conn = null;
    try {
        _log.info("...Connecting to database by data source:");
        conn = driverInstance.connect(_url, info);
    } catch (SQLException e) {
        String msg = "Failed to connect: url=" + _url + " user=" + _user;
        throw new DfJDBCException(msg, e);
    }
    if (conn == null) {
        String msg = "The driver didn't understand the URL: " + _url;
        throw new DfJDBCException(msg);
    }
    try {
        conn.setAutoCommit(_autoCommit);
    } catch (SQLException e) {
        String msg = "Failed to set auto commit: autocommit=" + _autoCommit;
        throw new DfJDBCException(msg, e);
    }
    return conn;
}
Also used: DfJDBCException (org.dbflute.exception.DfJDBCException), SQLException (java.sql.SQLException), Connection (java.sql.Connection), Driver (java.sql.Driver), Properties (java.util.Properties)
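
The explicit null check after connect() is required because java.sql.Driver#connect is specified to return null, rather than throw, when the driver does not recognize the URL. A minimal sketch of driving that contract directly (the class name and credential handling are placeholders):

import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
import java.util.Properties;

public class DriverConnectSketch {

    static Connection connect(Driver driver, String url, String user, String password) throws SQLException {
        final Properties info = new Properties();
        info.setProperty("user", user);
        info.setProperty("password", password);
        final Connection conn = driver.connect(url, info);
        if (conn == null) { // per the JDBC contract: unrecognized URL means null, not an exception
            throw new SQLException("The driver didn't understand the URL: " + url);
        }
        return conn;
    }
}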

Aggregations

DfJDBCException (org.dbflute.exception.DfJDBCException): 17
SQLException (java.sql.SQLException): 12
Connection (java.sql.Connection): 6
ExceptionMessageBuilder (org.dbflute.helper.message.ExceptionMessageBuilder): 6
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 5
File (java.io.File): 3
ArrayList (java.util.ArrayList): 3
BufferedReader (java.io.BufferedReader): 2
FileInputStream (java.io.FileInputStream): 2
InputStreamReader (java.io.InputStreamReader): 2
PreparedStatement (java.sql.PreparedStatement): 2
List (java.util.List): 2
DfDelimiterDataRegistrationFailureException (org.dbflute.exception.DfDelimiterDataRegistrationFailureException): 2
DfXlsDataRegistrationFailureException (org.dbflute.exception.DfXlsDataRegistrationFailureException): 2
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
Driver (java.sql.Driver): 1
Statement (java.sql.Statement): 1
HashMap (java.util.HashMap): 1
Map (java.util.Map): 1