Use of org.apache.cayenne.log.JdbcEventLogger in project cayenne by apache.
From the class BatchAction, method runAsBatch:
protected void runAsBatch(Connection con, BatchTranslator translator, OperationObserver delegate)
        throws SQLException, Exception {

    String sql = translator.getSql();
    JdbcEventLogger logger = dataNode.getJdbcEventLogger();
    boolean isLoggable = logger.isLoggable();

    // log batch SQL execution
    logger.log(sql);

    // run batch
    DbAdapter adapter = dataNode.getAdapter();
    try (PreparedStatement statement = con.prepareStatement(sql)) {
        for (BatchQueryRow row : query.getRows()) {
            DbAttributeBinding[] bindings = translator.updateBindings(row);
            logger.logQueryParameters("batch bind", bindings);
            bind(adapter, statement, bindings);
            statement.addBatch();
        }

        // execute the whole batch
        int[] results = statement.executeBatch();
        delegate.nextBatchCount(query, results);

        if (isLoggable) {
            int totalUpdateCount = 0;
            for (int result : results) {
                // Statement.EXECUTE_FAILED
                if (result < 0) {
                    totalUpdateCount = Statement.SUCCESS_NO_INFO;
                    break;
                }
                totalUpdateCount += result;
            }
            logger.logUpdateCount(totalUpdateCount);
        }
    }
}
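The logger returned by dataNode.getJdbcEventLogger() is supplied through Cayenne's dependency injection, so an application can swap in its own implementation and have it used by actions like runAsBatch. A minimal sketch, assuming the Cayenne 4.x ServerRuntime builder API; MyJdbcEventLogger is a hypothetical JdbcEventLogger implementation and the project file name is a placeholder, neither appears in the sources above:

import org.apache.cayenne.configuration.server.ServerRuntime;
import org.apache.cayenne.log.JdbcEventLogger;

public class CustomLoggerSetup {

    public static void main(String[] args) {
        // Sketch only: bind a custom JdbcEventLogger so that
        // DataNode.getJdbcEventLogger() returns it to query actions.
        // MyJdbcEventLogger is hypothetical; "cayenne-project.xml" is a placeholder.
        ServerRuntime runtime = ServerRuntime.builder()
                .addConfig("cayenne-project.xml")
                .addModule(binder -> binder.bind(JdbcEventLogger.class).to(MyJdbcEventLogger.class))
                .build();

        runtime.shutdown();
    }
}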
Use of org.apache.cayenne.log.JdbcEventLogger in project cayenne by apache.
From the class AbstractToDbToken, method executeSql:
void executeSql(MergerContext mergerContext, String sql) {
    JdbcEventLogger logger = mergerContext.getDataNode().getJdbcEventLogger();
    logger.log(sql);

    try (Connection conn = mergerContext.getDataNode().getDataSource().getConnection()) {
        try (Statement st = conn.createStatement()) {
            st.execute(sql);
        }
    } catch (SQLException e) {
        mergerContext.getValidationResult()
                .addFailure(new SimpleValidationFailure(sql, e.getMessage()));
        logger.logQueryError(e);
    }
}
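Note that failures are not rethrown here: the SQLException goes to the JdbcEventLogger while the offending SQL is recorded on the MergerContext's ValidationResult. A minimal sketch of how a caller might inspect those failures after a merge run; the mergerContext variable and the surrounding merge are assumed, not shown in the source above:

import org.apache.cayenne.validation.ValidationFailure;
import org.apache.cayenne.validation.ValidationResult;

// Sketch only: read back the SQL statements that executeSql(..) recorded as failed.
ValidationResult result = mergerContext.getValidationResult();
if (result.hasFailures()) {
    for (ValidationFailure failure : result.getFailures()) {
        // getSource() is the SQL string, getError() the SQLException message
        System.err.println("Failed: " + failure.getSource() + " -> " + failure.getError());
    }
}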
Use of org.apache.cayenne.log.JdbcEventLogger in project cayenne by apache.
From the class BatchAction, method runAsIndividualQueries:
/**
 * Executes batch as individual queries over the same prepared statement.
 */
protected void runAsIndividualQueries(Connection connection, BatchTranslator translator,
        OperationObserver delegate, boolean generatesKeys) throws SQLException, Exception {

    JdbcEventLogger logger = dataNode.getJdbcEventLogger();
    boolean useOptimisticLock = query.isUsingOptimisticLocking();
    String queryStr = translator.getSql();

    // log batch SQL execution
    logger.log(queryStr);

    // run batch queries one by one
    DbAdapter adapter = dataNode.getAdapter();
    try (PreparedStatement statement = prepareStatement(connection, queryStr, adapter, generatesKeys)) {
        for (BatchQueryRow row : query.getRows()) {
            DbAttributeBinding[] bindings = translator.updateBindings(row);
            logger.logQueryParameters("bind", bindings);
            bind(adapter, statement, bindings);

            int updated = statement.executeUpdate();
            if (useOptimisticLock && updated != 1) {
                throw new OptimisticLockException(row.getObjectId(), query.getDbEntity(), queryStr,
                        row.getQualifier());
            }

            delegate.nextCount(query, updated);
            if (generatesKeys) {
                processGeneratedKeys(statement, delegate, row);
            }

            logger.logUpdateCount(updated);
        }
    }
}
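When generatesKeys is true, the statement must be prepared with generated-key support so that processGeneratedKeys(..) can read the database-assigned values back. A plain-JDBC sketch of that mechanism, not Cayenne's own code; the table and column names are illustrative and the connection variable is assumed from the method above:

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch only: prepare with RETURN_GENERATED_KEYS, execute, then read the
// keys the database assigned; this is the JDBC contract the generatesKeys
// branch above relies on.
try (PreparedStatement ps = connection.prepareStatement(
        "INSERT INTO ARTIST (NAME) VALUES (?)", Statement.RETURN_GENERATED_KEYS)) {
    ps.setString(1, "Dali");
    int updated = ps.executeUpdate();

    try (ResultSet keys = ps.getGeneratedKeys()) {
        while (keys.next()) {
            long generatedId = keys.getLong(1); // database-assigned primary key
        }
    }
}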
Use of org.apache.cayenne.log.JdbcEventLogger in project cayenne by apache.
From the class SelectAction, method performAction:
@SuppressWarnings({ "unchecked", "rawtypes", "resource" })
@Override
public void performAction(Connection connection, OperationObserver observer)
        throws SQLException, Exception {

    final long t1 = System.currentTimeMillis();

    JdbcEventLogger logger = dataNode.getJdbcEventLogger();

    SelectTranslator translator = dataNode.selectTranslator(query);
    final String sql = translator.getSql();
    final DbAttributeBinding[] bindings = translator.getBindings();
    logger.logQuery(sql, bindings);

    PreparedStatement statement = connection.prepareStatement(sql);
    bind(dataNode.getAdapter(), statement, bindings);

    int fetchSize = queryMetadata.getStatementFetchSize();
    if (fetchSize != 0) {
        statement.setFetchSize(fetchSize);
    }

    ResultSet rs;
    // need to run in try-catch block to close statement properly if an
    // exception happens
    try {
        rs = statement.executeQuery();
    } catch (Exception ex) {
        statement.close();
        throw ex;
    }

    RowDescriptor descriptor = new RowDescriptorBuilder()
            .setColumns(translator.getResultColumns())
            .getDescriptor(dataNode.getAdapter().getExtendedTypes());
    RowReader<?> rowReader = dataNode.rowReader(descriptor, queryMetadata, translator.getAttributeOverrides());

    ResultIterator it = new JDBCResultIterator(statement, rs, rowReader);

    it = forIteratedResult(it, observer, connection, t1, sql);
    it = forSuppressedDistinct(it, translator);
    it = forFetchLimit(it, translator);

    if (observer.isIteratedResult()) {
        try {
            observer.nextRows(query, it);
        } catch (Exception ex) {
            it.close();
            throw ex;
        }
    } else {
        List<DataRow> resultRows;
        try {
            resultRows = it.allRows();
        } finally {
            it.close();
        }

        dataNode.getJdbcEventLogger().logSelectCount(resultRows.size(), System.currentTimeMillis() - t1, sql);
        observer.nextRows(query, resultRows);
    }
}
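For the non-iterated branch the flow is: time the query, apply the fetch-size hint, read every row, close the iterator, then report the row count and elapsed time through logSelectCount. A simplified plain-JDBC sketch of that pattern, not the SelectAction code itself; the SQL string and the connection variable are assumptions for illustration:

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

// Sketch only: mirrors the non-iterated branch above in plain JDBC.
// Measure elapsed time, set the fetch-size hint, collect all rows,
// then report the count and timing the way logSelectCount does.
long start = System.currentTimeMillis();
List<String> names = new ArrayList<>();

try (PreparedStatement ps = connection.prepareStatement("SELECT NAME FROM ARTIST")) {
    ps.setFetchSize(100); // driver hint; SelectAction only sets it when non-zero

    try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            names.add(rs.getString(1));
        }
    }
}

long elapsed = System.currentTimeMillis() - start;
System.out.println(names.size() + " rows fetched in " + elapsed + " ms");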