Example 66 with BatchUpdateException

Use of java.sql.BatchUpdateException in project voltdb by VoltDB.

From class JDBC4Statement, method executeBatch.

// Submits a batch of commands to the database for execution and, if all commands execute successfully, returns an array of update counts.
@Override
public int[] executeBatch() throws SQLException {
    checkClosed();
    closeCurrentResult();
    if (batch == null || batch.size() == 0) {
        return new int[0];
    }
    int[] updateCounts = new int[batch.size()];
    // keep a running total of update counts
    int runningUpdateCount = 0;
    int i = 0;
    try {
        for (; i < batch.size(); i++) {
            setCurrentResult(null, (int) batch.get(i)
                    .execute(sourceConnection.NativeConnection, this.m_timeout, sourceConnection.queryTimeOutUnit)[0]
                    .fetchRow(0).getLong(0));
            updateCounts[i] = this.lastUpdateCount;
            runningUpdateCount += this.lastUpdateCount;
        }
    } catch (SQLException x) {
        updateCounts[i] = EXECUTE_FAILED;
        throw new BatchUpdateException(Arrays.copyOf(updateCounts, i + 1), x);
    } finally {
        clearBatch();
    }
    // replace the update count from the last statement with the update count
    // from the last batch.
    this.lastUpdateCount = runningUpdateCount;
    return updateCounts;
}
Also used: SQLException(java.sql.SQLException) BatchUpdateException(java.sql.BatchUpdateException)
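
For reference, here is how a caller consumes the contract above. This is an illustrative sketch, not VoltDB code; it assumes an open java.sql.Connection named conn and a table t with one integer column.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

static void runBatch(Connection conn) {
    try (Statement stmt = conn.createStatement()) {
        stmt.addBatch("INSERT INTO t VALUES (1)");
        stmt.addBatch("INSERT INTO t VALUES (2)");
        // Throws BatchUpdateException if any statement in the batch fails.
        int[] counts = stmt.executeBatch();
        System.out.println("all " + counts.length + " statements succeeded");
    } catch (BatchUpdateException bue) {
        // With the implementation above, entries 0..n-2 hold the counts of the
        // statements that succeeded; the last entry is Statement.EXECUTE_FAILED.
        int[] partial = bue.getUpdateCounts();
        System.err.println("batch failed after " + (partial.length - 1) + " statements");
    } catch (SQLException e) {
        System.err.println("batch could not be executed: " + e.getMessage());
    }
}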

Example 67 with BatchUpdateException

Use of java.sql.BatchUpdateException in project ignite by apache.

From class JdbcBatchUpdateTask, method call.

/**
 * {@inheritDoc}
 */
@Override
public int[] call() throws Exception {
    IgniteCache<?, ?> cache = ignite.cache(cacheName);
    // Don't create caches on server nodes, to avoid data rebalancing.
    boolean start = ignite.configuration().isClientMode();
    if (cache == null && cacheName == null)
        cache = ((IgniteKernal) ignite).context().cache().getOrStartPublicCache(start, !loc && locQry);
    if (cache == null) {
        if (cacheName == null) {
            throw createJdbcSqlException("Failed to execute query. No suitable caches found.", IgniteQueryErrorCode.CACHE_NOT_FOUND);
        } else {
            throw createJdbcSqlException("Cache not found [cacheName=" + cacheName + ']', IgniteQueryErrorCode.CACHE_NOT_FOUND);
        }
    }
    int batchSize = F.isEmpty(sql) ? sqlBatch.size() : batchArgs.size();
    int[] updCntrs = new int[batchSize];
    int idx = 0;
    try {
        if (F.isEmpty(sql)) {
            for (; idx < batchSize; idx++) updCntrs[idx] = doSingleUpdate(cache, sqlBatch.get(idx), null);
        } else {
            for (; idx < batchSize; idx++) updCntrs[idx] = doSingleUpdate(cache, sql, batchArgs.get(idx));
        }
    } catch (Exception ex) {
        throw new BatchUpdateException(Arrays.copyOf(updCntrs, idx), ex);
    }
    return updCntrs;
}
Also used: IgniteKernal(org.apache.ignite.internal.IgniteKernal) BatchUpdateException(java.sql.BatchUpdateException) IgniteQueryErrorCode.createJdbcSqlException(org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode.createJdbcSqlException) SQLException(java.sql.SQLException)
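
Note the contrast with Example 66: Arrays.copyOf(updCntrs, idx) reports counts only for the idx statements that completed, with no entry for the failing one, while the VoltDB version appends an EXECUTE_FAILED marker for the failed slot. The two conventions side by side (illustrative sketch, not Ignite code):

import java.sql.BatchUpdateException;
import java.sql.Statement;
import java.util.Arrays;

// Statement at index idx of the batch failed; counts[0..idx-1] hold the
// update counts of the statements that completed before it.
static BatchUpdateException truncatedStyle(int[] counts, int idx, Exception cause) {
    // Example 67: report only the statements that completed.
    return new BatchUpdateException(Arrays.copyOf(counts, idx), cause);
}

static BatchUpdateException markedStyle(int[] counts, int idx, Exception cause) {
    // Example 66: include the failed slot, marked EXECUTE_FAILED.
    int[] withFailure = Arrays.copyOf(counts, idx + 1);
    withFailure[idx] = Statement.EXECUTE_FAILED;
    return new BatchUpdateException(withFailure, cause);
}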

Example 68 with BatchUpdateException

Use of java.sql.BatchUpdateException in project ignite by apache.

From class JdbcRequestHandler, method executeBatchedQuery.

/**
 * Executes query and updates result counters.
 *
 * @param qry Query.
 * @param updCntsAcc Per query rows updates counter.
 * @param firstErr First error data - code and message.
 */
@SuppressWarnings("ForLoopReplaceableByForEach")
private void executeBatchedQuery(SqlFieldsQueryEx qry, List<Integer> updCntsAcc, IgniteBiTuple<Integer, String> firstErr) {
    try {
        if (cliCtx.isStream()) {
            List<Long> cnt = ctx.query().streamBatchedUpdateQuery(qry.getSchema(), cliCtx, qry.getSql(), qry.batchedArguments());
            for (int i = 0; i < cnt.size(); i++) updCntsAcc.add(cnt.get(i).intValue());
            return;
        }
        List<FieldsQueryCursor<List<?>>> qryRes = ctx.query().querySqlFields(null, qry, cliCtx, true, true);
        for (FieldsQueryCursor<List<?>> cur : qryRes) {
            if (cur instanceof BulkLoadContextCursor)
                throw new IgniteSQLException("COPY command cannot be executed in batch mode.");
            assert !((QueryCursorImpl) cur).isQuery();
            Iterator<List<?>> it = cur.iterator();
            if (it.hasNext()) {
                int val = ((Long) it.next().get(0)).intValue();
                updCntsAcc.add(val);
            }
        }
    } catch (Exception e) {
        int code;
        String msg;
        if (e instanceof IgniteSQLException) {
            BatchUpdateException batchCause = X.cause(e, BatchUpdateException.class);
            if (batchCause != null) {
                int[] updCntsOnErr = batchCause.getUpdateCounts();
                for (int i = 0; i < updCntsOnErr.length; i++) updCntsAcc.add(updCntsOnErr[i]);
                msg = batchCause.getMessage();
                code = batchCause.getErrorCode();
            } else {
                for (int i = 0; i < qry.batchedArguments().size(); i++) updCntsAcc.add(Statement.EXECUTE_FAILED);
                msg = e.getMessage();
                code = ((IgniteSQLException) e).statusCode();
            }
        } else {
            for (int i = 0; i < qry.batchedArguments().size(); i++) updCntsAcc.add(Statement.EXECUTE_FAILED);
            msg = e.getMessage();
            code = IgniteQueryErrorCode.UNKNOWN;
        }
        if (firstErr.isEmpty())
            firstErr.set(code, msg);
        else
            U.error(log, "Failed to execute batch query [qry=" + qry + ']', e);
    }
}
Also used: FieldsQueryCursor(org.apache.ignite.cache.query.FieldsQueryCursor) BulkLoadContextCursor(org.apache.ignite.cache.query.BulkLoadContextCursor) BatchUpdateException(java.sql.BatchUpdateException) IgniteSQLException(org.apache.ignite.internal.processors.query.IgniteSQLException) AtomicLong(java.util.concurrent.atomic.AtomicLong) List(java.util.List) ArrayList(java.util.ArrayList)
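
X.cause(e, BatchUpdateException.class) is Ignite's helper for walking an exception's cause chain. Without it, the same unwrapping looks roughly like this (illustrative sketch):

import java.sql.BatchUpdateException;

// Walk the cause chain and return the first BatchUpdateException, or null.
static BatchUpdateException findBatchCause(Throwable e) {
    for (Throwable t = e; t != null; t = t.getCause()) {
        if (t instanceof BatchUpdateException)
            return (BatchUpdateException) t;
        if (t.getCause() == t)
            break; // guard against self-referential causes
    }
    return null;
}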

Example 69 with BatchUpdateException

Use of java.sql.BatchUpdateException in project dbflute-core by dbflute.

From class DfXlsDataHandlerImpl, method doWriteDataTable.

// -----------------------------------------------------
// DataTable
// ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                    columnMetaMap, // meta data
                    conn, ps, // JDBC resources
                    loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // use a transaction so we can roll back and retry on failure
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                                columnMetaMap, // meta data
                                conn, retryPs, // JDBC resources
                                LoggingInsertType.NONE, true); // option (no logging and suppress batch)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleXlsDataRegistartionFailureException(dataDirectory, file, tableDbName, e);
        // unreachable
        return -1;
    } catch (SQLException e) {
        handleWriteTableException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        // unreachable
        return -1;
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used: LoggingInsertType(org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement) DfDataRow(org.dbflute.helper.dataset.DfDataRow) BatchUpdateException(java.sql.BatchUpdateException)
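
Stripped of the xls-specific plumbing, the pattern above is: run the batch inside a transaction, and on BatchUpdateException roll back and replay the rows one at a time so the failure can be pinned to a concrete row. A condensed sketch under those assumptions (illustrative, not dbflute code):

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

// Run the batch in a transaction; on batch failure, replay row-by-row so the
// resulting SQLException identifies the offending row.
static void writeWithRetry(Connection conn, String sql, List<Object[]> rows)
        throws SQLException {
    conn.setAutoCommit(false);
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        for (Object[] row : rows) {
            for (int i = 0; i < row.length; i++)
                ps.setObject(i + 1, row[i]);
            ps.addBatch();
        }
        ps.executeBatch();
        conn.commit();
    } catch (BatchUpdateException e) {
        conn.rollback();
        try (PreparedStatement retry = conn.prepareStatement(sql)) {
            for (Object[] row : rows) {
                for (int i = 0; i < row.length; i++)
                    retry.setObject(i + 1, row[i]);
                retry.executeUpdate(); // throws on the offending row
            }
        }
        // If the replay did not reproduce the failure, rethrow the original.
        throw e;
    } catch (SQLException e) {
        conn.rollback(); // non-batch failure: just roll back and rethrow
        throw e;
    } finally {
        conn.setAutoCommit(true);
    }
}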

Example 70 with BatchUpdateException

Use of java.sql.BatchUpdateException in project nifi by apache.

From class PutSQL, method onBatchUpdateError.

private ExceptionHandler.OnError<FunctionContext, StatementFlowFileEnclosure> onBatchUpdateError(final ProcessContext context, final ProcessSession session, final RoutingResult result) {
    return RollbackOnFailure.createOnError((c, enclosure, r, e) -> {
        // If rollbackOnFailure is enabled, the error will be thrown as ProcessException instead.
        if (e instanceof BatchUpdateException && !c.isRollbackOnFailure()) {
            // If we get a BatchUpdateException, then we want to determine which FlowFile caused the failure,
            // and route that FlowFile to failure while routing those that finished processing to success and those
            // that have not yet been executed to retry.
            // Currently, fragmented transactions do not use batch update.
            final int[] updateCounts = ((BatchUpdateException) e).getUpdateCounts();
            final List<FlowFile> batchFlowFiles = enclosure.getFlowFiles();
            // In the presence of a BatchUpdateException, the driver has the option of either stopping when an error
            // occurs, or continuing. If it continues, then it must account for all statements in the batch and for
            // those that fail return a Statement.EXECUTE_FAILED for the number of rows updated.
            // So we will iterate over all of the update counts returned. If any is equal to Statement.EXECUTE_FAILED,
            // we will route the corresponding FlowFile to failure. Otherwise, the FlowFile will go to success
            // unless it has not yet been processed (its index in the List > updateCounts.length).
            int failureCount = 0;
            int successCount = 0;
            int retryCount = 0;
            for (int i = 0; i < updateCounts.length; i++) {
                final int updateCount = updateCounts[i];
                final FlowFile flowFile = batchFlowFiles.get(i);
                if (updateCount == Statement.EXECUTE_FAILED) {
                    result.routeTo(flowFile, REL_FAILURE);
                    failureCount++;
                } else {
                    result.routeTo(flowFile, REL_SUCCESS);
                    successCount++;
                }
            }
            if (failureCount == 0) {
                // if no failures found, the driver decided not to execute the statements after the
                // failure, so route the last one to failure.
                final FlowFile failedFlowFile = batchFlowFiles.get(updateCounts.length);
                result.routeTo(failedFlowFile, REL_FAILURE);
                failureCount++;
            }
            if (updateCounts.length < batchFlowFiles.size()) {
                final List<FlowFile> unexecuted = batchFlowFiles.subList(updateCounts.length + 1, batchFlowFiles.size());
                for (final FlowFile flowFile : unexecuted) {
                    result.routeTo(flowFile, REL_RETRY);
                    retryCount++;
                }
            }
            getLogger().error("Failed to update database due to a failed batch update, {}. There were a total of {} FlowFiles that failed, {} that succeeded, " + "and {} that were not execute and will be routed to retry; ", new Object[] { e, failureCount, successCount, retryCount }, e);
            return;
        }
        // Apply default error handling and logging for other Exceptions.
        ExceptionHandler.OnError<RollbackOnFailure, FlowFileGroup> onGroupError = ExceptionHandler.createOnGroupError(context, session, result, REL_FAILURE, REL_RETRY);
        onGroupError = onGroupError.andThen((cl, il, rl, el) -> {
            switch(r.destination()) {
                case Failure:
                    getLogger().error("Failed to update database for {} due to {}; routing to failure", new Object[] { il.getFlowFiles(), e }, e);
                    break;
                case Retry:
                    getLogger().error("Failed to update database for {} due to {}; it is possible that retrying the operation will succeed, so routing to retry", new Object[] { il.getFlowFiles(), e }, e);
                    break;
            }
        });
        onGroupError.apply(c, enclosure, r, e);
    });
}
Also used: ExceptionHandler(org.apache.nifi.processor.util.pattern.ExceptionHandler) StandardValidators(org.apache.nifi.processor.util.StandardValidators) FetchFlowFiles(org.apache.nifi.processor.util.pattern.PartialFunctions.FetchFlowFiles) FlowFileFilter(org.apache.nifi.processor.FlowFileFilter) SQLNonTransientException(java.sql.SQLNonTransientException) Connection(java.sql.Connection) BiFunction(java.util.function.BiFunction) ExceptionHandler.createOnError(org.apache.nifi.processor.util.pattern.ExceptionHandler.createOnError) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ErrorTypes(org.apache.nifi.processor.util.pattern.ErrorTypes) RoutingResult(org.apache.nifi.processor.util.pattern.RoutingResult) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) ResultSet(java.sql.ResultSet) Map(java.util.Map) ReadsAttributes(org.apache.nifi.annotation.behavior.ReadsAttributes) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) PutGroup(org.apache.nifi.processor.util.pattern.PutGroup) FlowFile(org.apache.nifi.flowfile.FlowFile) FragmentAttributes(org.apache.nifi.flowfile.attributes.FragmentAttributes) Set(java.util.Set) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) PreparedStatement(java.sql.PreparedStatement) StandardCharsets(java.nio.charset.StandardCharsets) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) List(java.util.List) JdbcCommon(org.apache.nifi.processors.standard.util.JdbcCommon) Tags(org.apache.nifi.annotation.documentation.Tags) DBCPService(org.apache.nifi.dbcp.DBCPService) ReadsAttribute(org.apache.nifi.annotation.behavior.ReadsAttribute) RollbackOnFailure(org.apache.nifi.processor.util.pattern.RollbackOnFailure) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) BatchUpdateException(java.sql.BatchUpdateException) HashMap(java.util.HashMap) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) SQLException(java.sql.SQLException) Relationship(org.apache.nifi.processor.Relationship) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) AbstractSessionFactoryProcessor(org.apache.nifi.processor.AbstractSessionFactoryProcessor) PartialFunctions(org.apache.nifi.processor.util.pattern.PartialFunctions) FlowFileGroup(org.apache.nifi.processor.util.pattern.PartialFunctions.FlowFileGroup) ProcessContext(org.apache.nifi.processor.ProcessContext) ProcessSession(org.apache.nifi.processor.ProcessSession) IOException(java.io.IOException) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) ProcessSessionFactory(org.apache.nifi.processor.ProcessSessionFactory) TimeUnit(java.util.concurrent.TimeUnit) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) StreamUtils(org.apache.nifi.stream.io.StreamUtils) Statement(java.sql.Statement) BitSet(java.util.BitSet) Comparator(java.util.Comparator) InputStream(java.io.InputStream)
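
The routing decisions above reduce to classifying each batch position against the counts the BatchUpdateException reported. A stripped-down sketch of that classification (illustrative, not NiFi code):

import java.sql.Statement;

enum Outcome { SUCCESS, FAILURE, RETRY }

// Classify position i of the batch given the reported update counts. If the
// driver stopped at the failure, the counts end just before the failed
// statement; if it continued, failed slots hold Statement.EXECUTE_FAILED.
static Outcome classify(int i, int[] updateCounts) {
    if (i < updateCounts.length)
        return updateCounts[i] == Statement.EXECUTE_FAILED
                ? Outcome.FAILURE : Outcome.SUCCESS;
    if (i == updateCounts.length)
        return Outcome.FAILURE; // first unreported statement is the one that failed
    return Outcome.RETRY; // never attempted
}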

Aggregations

BatchUpdateException (java.sql.BatchUpdateException): 103
SQLException (java.sql.SQLException): 39
PreparedStatement (java.sql.PreparedStatement): 33
Statement (java.sql.Statement): 22
ArrayList (java.util.ArrayList): 19
Test (org.junit.Test): 19
Connection (java.sql.Connection): 17
Test (org.testng.annotations.Test): 17
BaseTest (util.BaseTest): 17
SerializedBatchUpdateException (util.SerializedBatchUpdateException): 17
ResultSet (java.sql.ResultSet): 13
List (java.util.List): 12
CallableStatement (java.sql.CallableStatement): 8
HashSet (java.util.HashSet): 8
IgniteSQLException (org.apache.ignite.internal.processors.query.IgniteSQLException): 7
HashMap (java.util.HashMap): 6
Map (java.util.Map): 5
CustomChangeException (liquibase.exception.CustomChangeException): 5
DatabaseException (liquibase.exception.DatabaseException): 5
SetupException (liquibase.exception.SetupException): 5