Use of org.hibernate.engine.jdbc.batch.internal.BasicBatchKey in project hibernate-orm by hibernate.
From the class BasicCollectionPersister, method doUpdateRows:
@Override
protected int doUpdateRows(Serializable id, PersistentCollection collection, SharedSessionContractImplementor session) throws HibernateException {
    if (ArrayHelper.isAllFalse(elementColumnIsSettable)) {
        return 0;
    }
    try {
        PreparedStatement st = null;
        Expectation expectation = Expectations.appropriateExpectation(getUpdateCheckStyle());
        boolean callable = isUpdateCallable();
        boolean useBatch = expectation.canBeBatched();
        Iterator entries = collection.entries(this);
        String sql = getSQLUpdateRowString();
        int i = 0;
        int count = 0;
        while (entries.hasNext()) {
            Object entry = entries.next();
            if (collection.needsUpdating(entry, i, elementType)) {
                int offset = 1;
                if (useBatch) {
                    if (updateBatchKey == null) {
                        updateBatchKey = new BasicBatchKey(getRole() + "#UPDATE", expectation);
                    }
                    st = session.getJdbcCoordinator().getBatch(updateBatchKey).getBatchStatement(sql, callable);
                } else {
                    st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
                }
                try {
                    offset += expectation.prepare(st);
                    int loc = writeElement(st, collection.getElement(entry), offset, session);
                    if (hasIdentifier) {
                        writeIdentifier(st, collection.getIdentifier(entry, i), loc, session);
                    } else {
                        loc = writeKey(st, id, loc, session);
                        if (hasIndex && !indexContainsFormula) {
                            writeIndexToWhere(st, collection.getIndex(entry, i, this), loc, session);
                        } else {
                            writeElementToWhere(st, collection.getSnapshotElement(entry, i), loc, session);
                        }
                    }
                    if (useBatch) {
                        session.getJdbcCoordinator().getBatch(updateBatchKey).addToBatch();
                    } else {
                        expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
                    }
                } catch (SQLException sqle) {
                    if (useBatch) {
                        session.getJdbcCoordinator().abortBatch();
                    }
                    throw sqle;
                } finally {
                    if (!useBatch) {
                        session.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().release(st);
                        session.getJdbcCoordinator().afterStatementExecution();
                    }
                }
                count++;
            }
            i++;
        }
        return count;
    } catch (SQLException sqle) {
        throw session.getJdbcServices().getSqlExceptionHelper().convert(sqle, "could not update collection rows: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLUpdateRowString());
    }
}
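All of the snippets on this page repeat the same per-row skeleton around BasicBatchKey: lazily create one key per logical operation, fetch either a batch statement or a plain prepared statement, bind the row, then either add it to the batch or execute and verify it immediately. The following is a minimal sketch of that skeleton outside of any persister; the BatchedRowWriter class, the RowBinder callback, and the literal key name are hypothetical, not part of hibernate-orm.

import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.hibernate.engine.jdbc.batch.internal.BasicBatchKey;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.jdbc.Expectation;

// Hypothetical helper distilling the per-row pattern shared by the persister
// methods shown on this page; it is illustrative only.
public class BatchedRowWriter {

    // Created lazily, once per logical operation, and then reused so that all
    // rows of that operation land in the same batch.
    private BasicBatchKey batchKey;

    public void executeRow(
            SharedSessionContractImplementor session,
            String sql,
            boolean callable,
            Expectation expectation,
            RowBinder binder) throws SQLException {

        final boolean useBatch = expectation.canBeBatched();
        if (useBatch && batchKey == null) {
            batchKey = new BasicBatchKey("SomeEntity.someCollection#UPDATE", expectation);
        }

        PreparedStatement st;
        if (useBatch) {
            // The coordinator returns (or creates) the statement owned by the batch.
            st = session.getJdbcCoordinator().getBatch(batchKey).getBatchStatement(sql, callable);
        } else {
            st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
        }

        try {
            int offset = 1 + expectation.prepare(st);
            binder.bind(st, offset);
            if (useBatch) {
                // Defer execution to the batch instead of executing right away.
                session.getJdbcCoordinator().getBatch(batchKey).addToBatch();
            } else {
                expectation.verifyOutcome(
                        session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
            }
        } catch (SQLException e) {
            if (useBatch) {
                session.getJdbcCoordinator().abortBatch();
            }
            throw e;
        } finally {
            if (!useBatch) {
                session.getJdbcCoordinator().getResourceRegistry().release(st);
                session.getJdbcCoordinator().afterStatementExecution();
            }
        }
    }

    // Hypothetical callback that binds one row's parameters starting at the given offset.
    public interface RowBinder {
        void bind(PreparedStatement st, int offset) throws SQLException;
    }
}

The key is what groups statements into a batch: the coordinator keeps a single current batch, and requesting a batch with a different key executes the pending one first. That is why each persister composes its keys from the role or entity name plus the operation, as in getRole() + "#UPDATE" above.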
From the class OneToManyPersister, method doUpdateRows:
@Override
protected int doUpdateRows(Serializable id, PersistentCollection collection, SharedSessionContractImplementor session) {
    try {
        int count = 0;
        if (isRowDeleteEnabled()) {
            final Expectation deleteExpectation = Expectations.appropriateExpectation(getDeleteCheckStyle());
            final boolean useBatch = deleteExpectation.canBeBatched();
            if (useBatch && deleteRowBatchKey == null) {
                deleteRowBatchKey = new BasicBatchKey(getRole() + "#DELETEROW", deleteExpectation);
            }
            final String sql = getSQLDeleteRowString();
            PreparedStatement st = null;
            // update removed rows fks to null
            try {
                int i = 0;
                Iterator entries = collection.entries(this);
                int offset = 1;
                while (entries.hasNext()) {
                    Object entry = entries.next();
                    if (collection.needsUpdating(entry, i, elementType)) {
                        // will still be issued when it used to be null
                        if (useBatch) {
                            st = session.getJdbcCoordinator().getBatch(deleteRowBatchKey).getBatchStatement(sql, isDeleteCallable());
                        } else {
                            st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, isDeleteCallable());
                        }
                        int loc = writeKey(st, id, offset, session);
                        writeElementToWhere(st, collection.getSnapshotElement(entry, i), loc, session);
                        if (useBatch) {
                            session.getJdbcCoordinator().getBatch(deleteRowBatchKey).addToBatch();
                        } else {
                            deleteExpectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
                        }
                        count++;
                    }
                    i++;
                }
            } catch (SQLException e) {
                if (useBatch) {
                    session.getJdbcCoordinator().abortBatch();
                }
                throw e;
            } finally {
                if (!useBatch) {
                    session.getJdbcCoordinator().getResourceRegistry().release(st);
                    session.getJdbcCoordinator().afterStatementExecution();
                }
            }
        }
        if (isRowInsertEnabled()) {
            final Expectation insertExpectation = Expectations.appropriateExpectation(getInsertCheckStyle());
            boolean useBatch = insertExpectation.canBeBatched();
            boolean callable = isInsertCallable();
            if (useBatch && insertRowBatchKey == null) {
                insertRowBatchKey = new BasicBatchKey(getRole() + "#INSERTROW", insertExpectation);
            }
            final String sql = getSQLInsertRowString();
            PreparedStatement st = null;
            // now update all changed or added rows fks
            try {
                int i = 0;
                Iterator entries = collection.entries(this);
                while (entries.hasNext()) {
                    Object entry = entries.next();
                    int offset = 1;
                    if (collection.needsUpdating(entry, i, elementType)) {
                        if (useBatch) {
                            st = session.getJdbcCoordinator().getBatch(insertRowBatchKey).getBatchStatement(sql, callable);
                        } else {
                            st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
                        }
                        offset += insertExpectation.prepare(st);
                        int loc = writeKey(st, id, offset, session);
                        if (hasIndex && !indexContainsFormula) {
                            loc = writeIndexToWhere(st, collection.getIndex(entry, i, this), loc, session);
                        }
                        writeElementToWhere(st, collection.getElement(entry), loc, session);
                        if (useBatch) {
                            session.getJdbcCoordinator().getBatch(insertRowBatchKey).addToBatch();
                        } else {
                            insertExpectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
                        }
                        count++;
                    }
                    i++;
                }
            } catch (SQLException sqle) {
                if (useBatch) {
                    session.getJdbcCoordinator().abortBatch();
                }
                throw sqle;
            } finally {
                if (!useBatch) {
                    session.getJdbcCoordinator().getResourceRegistry().release(st);
                    session.getJdbcCoordinator().afterStatementExecution();
                }
            }
        }
        return count;
    } catch (SQLException sqle) {
        throw getFactory().getSQLExceptionHelper().convert(sqle, "could not update collection rows: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLInsertRowString());
    }
}
From the class AbstractCollectionPersister, method insertRows:
@Override
public void insertRows(PersistentCollection collection, Serializable id, SharedSessionContractImplementor session) throws HibernateException {
    if (isInverse) {
        return;
    }
    if (!isRowInsertEnabled()) {
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debugf("Inserting rows of collection: %s", MessageHelper.collectionInfoString(this, collection, id, session));
    }
    try {
        // insert all the new entries
        collection.preInsert(this);
        Iterator entries = collection.entries(this);
        Expectation expectation = Expectations.appropriateExpectation(getInsertCheckStyle());
        boolean callable = isInsertCallable();
        boolean useBatch = expectation.canBeBatched();
        String sql = getSQLInsertRowString();
        int i = 0;
        int count = 0;
        while (entries.hasNext()) {
            int offset = 1;
            Object entry = entries.next();
            PreparedStatement st = null;
            if (collection.needsInserting(entry, i, elementType)) {
                if (useBatch) {
                    if (insertBatchKey == null) {
                        insertBatchKey = new BasicBatchKey(getRole() + "#INSERT", expectation);
                    }
                    if (st == null) {
                        st = session.getJdbcCoordinator().getBatch(insertBatchKey).getBatchStatement(sql, callable);
                    }
                } else {
                    st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
                }
                try {
                    offset += expectation.prepare(st);
                    // TODO: copy/paste from recreate()
                    offset = writeKey(st, id, offset, session);
                    if (hasIdentifier) {
                        offset = writeIdentifier(st, collection.getIdentifier(entry, i), offset, session);
                    }
                    if (hasIndex /* && !indexIsFormula */) {
                        offset = writeIndex(st, collection.getIndex(entry, i, this), offset, session);
                    }
                    writeElement(st, collection.getElement(entry), offset, session);
                    if (useBatch) {
                        session.getJdbcCoordinator().getBatch(insertBatchKey).addToBatch();
                    } else {
                        expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
                    }
                    collection.afterRowInsert(this, entry, i);
                    count++;
                } catch (SQLException sqle) {
                    if (useBatch) {
                        session.getJdbcCoordinator().abortBatch();
                    }
                    throw sqle;
                } finally {
                    if (!useBatch) {
                        session.getJdbcCoordinator().getResourceRegistry().release(st);
                        session.getJdbcCoordinator().afterStatementExecution();
                    }
                }
            }
            i++;
        }
        LOG.debugf("Done inserting rows: %s inserted", count);
    } catch (SQLException sqle) {
        throw sqlExceptionHelper.convert(sqle, "could not insert collection rows: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLInsertRowString());
    }
}
From the class AbstractCollectionPersister, method recreate:
@Override
public void recreate(PersistentCollection collection, Serializable id, SharedSessionContractImplementor session) throws HibernateException {
    if (isInverse) {
        return;
    }
    if (!isRowInsertEnabled()) {
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debugf("Inserting collection: %s", MessageHelper.collectionInfoString(this, collection, id, session));
    }
    try {
        // create all the new entries
        Iterator entries = collection.entries(this);
        if (entries.hasNext()) {
            Expectation expectation = Expectations.appropriateExpectation(getInsertCheckStyle());
            collection.preInsert(this);
            int i = 0;
            int count = 0;
            while (entries.hasNext()) {
                final Object entry = entries.next();
                if (collection.entryExists(entry, i)) {
                    int offset = 1;
                    PreparedStatement st = null;
                    boolean callable = isInsertCallable();
                    boolean useBatch = expectation.canBeBatched();
                    String sql = getSQLInsertRowString();
                    if (useBatch) {
                        if (recreateBatchKey == null) {
                            recreateBatchKey = new BasicBatchKey(getRole() + "#RECREATE", expectation);
                        }
                        st = session.getJdbcCoordinator().getBatch(recreateBatchKey).getBatchStatement(sql, callable);
                    } else {
                        st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
                    }
                    try {
                        offset += expectation.prepare(st);
                        // TODO: copy/paste from insertRows()
                        int loc = writeKey(st, id, offset, session);
                        if (hasIdentifier) {
                            loc = writeIdentifier(st, collection.getIdentifier(entry, i), loc, session);
                        }
                        if (hasIndex /* && !indexIsFormula */) {
                            loc = writeIndex(st, collection.getIndex(entry, i, this), loc, session);
                        }
                        loc = writeElement(st, collection.getElement(entry), loc, session);
                        if (useBatch) {
                            session.getJdbcCoordinator().getBatch(recreateBatchKey).addToBatch();
                        } else {
                            expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
                        }
                        collection.afterRowInsert(this, entry, i);
                        count++;
                    } catch (SQLException sqle) {
                        if (useBatch) {
                            session.getJdbcCoordinator().abortBatch();
                        }
                        throw sqle;
                    } finally {
                        if (!useBatch) {
                            session.getJdbcCoordinator().getResourceRegistry().release(st);
                            session.getJdbcCoordinator().afterStatementExecution();
                        }
                    }
                }
                i++;
            }
            LOG.debugf("Done inserting collection: %s rows inserted", count);
        } else {
            LOG.debug("Collection was empty");
        }
    } catch (SQLException sqle) {
        throw sqlExceptionHelper.convert(sqle, "could not insert collection: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLInsertRowString());
    }
}
From the class AbstractEntityPersister, method update:
protected boolean update(final Serializable id, final Object[] fields, final Object[] oldFields, final Object rowId, final boolean[] includeProperty, final int j, final Object oldVersion, final Object object, final String sql, final SharedSessionContractImplementor session) throws HibernateException {
    final Expectation expectation = Expectations.appropriateExpectation(updateResultCheckStyles[j]);
    // note: updates to joined tables can't be batched...
    final boolean useBatch = j == 0 && expectation.canBeBatched() && isBatchable();
    if (useBatch && updateBatchKey == null) {
        updateBatchKey = new BasicBatchKey(getEntityName() + "#UPDATE", expectation);
    }
    final boolean callable = isUpdateCallable(j);
    final boolean useVersion = j == 0 && isVersioned();
    if (LOG.isTraceEnabled()) {
        LOG.tracev("Updating entity: {0}", MessageHelper.infoString(this, id, getFactory()));
        if (useVersion) {
            LOG.tracev("Existing version: {0} -> New version:{1}", oldVersion, fields[getVersionProperty()]);
        }
    }
    try {
        // starting index
        int index = 1;
        final PreparedStatement update;
        if (useBatch) {
            update = session.getJdbcCoordinator().getBatch(updateBatchKey).getBatchStatement(sql, callable);
        } else {
            update = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
        }
        try {
            index += expectation.prepare(update);
            // Now write the values of fields onto the prepared statement
            index = dehydrate(id, fields, rowId, includeProperty, propertyColumnUpdateable, j, update, session, index, true);
            // Write any appropriate versioning conditional parameters
            if (useVersion && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION) {
                if (checkVersion(includeProperty)) {
                    getVersionType().nullSafeSet(update, oldVersion, index, session);
                }
            } else if (isAllOrDirtyOptLocking() && oldFields != null) {
                // TODO: is this really necessary????
                boolean[] versionability = getPropertyVersionability();
                boolean[] includeOldField = entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.ALL ? getPropertyUpdateability() : includeProperty;
                Type[] types = getPropertyTypes();
                for (int i = 0; i < entityMetamodel.getPropertySpan(); i++) {
                    boolean include = includeOldField[i] &&
                            isPropertyOfTable(i, j) &&
                            versionability[i]; // TODO: is this really necessary????
                    if (include) {
                        boolean[] settable = types[i].toColumnNullness(oldFields[i], getFactory());
                        types[i].nullSafeSet(update, oldFields[i], index, settable, session);
                        index += ArrayHelper.countTrue(settable);
                    }
                }
            }
            if (useBatch) {
                session.getJdbcCoordinator().getBatch(updateBatchKey).addToBatch();
                return true;
            } else {
                return check(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(update), id, j, expectation, update);
            }
        } catch (SQLException e) {
            if (useBatch) {
                session.getJdbcCoordinator().abortBatch();
            }
            throw e;
        } finally {
            if (!useBatch) {
                session.getJdbcCoordinator().getResourceRegistry().release(update);
                session.getJdbcCoordinator().afterStatementExecution();
            }
        }
    } catch (SQLException e) {
        throw getFactory().getSQLExceptionHelper().convert(e, "could not update: " + MessageHelper.infoString(this, id, getFactory()), sql);
    }
}
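These code paths only produce real JDBC batching when batching is enabled in configuration: with a batch size of 1, the coordinator falls back to a non-batching Batch implementation that executes each statement as soon as addToBatch() is called. A minimal bootstrap sketch follows; the class name is hypothetical, the property values are examples rather than recommendations, and mappings and connection settings are omitted.

import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;

// Illustrative bootstrap only: mappings and connection settings are omitted.
public class BatchingBootstrap {

    public static SessionFactory buildSessionFactory() {
        Configuration cfg = new Configuration()
                // with the default size of 1 the coordinator uses a non-batching
                // Batch that executes each statement immediately
                .setProperty("hibernate.jdbc.batch_size", "20")
                // ordering inserts can let more statements share a batch when
                // several entities are persisted together
                .setProperty("hibernate.order_inserts", "true");
        return cfg.buildSessionFactory(
                new StandardServiceRegistryBuilder()
                        .applySettings(cfg.getProperties())
                        .build());
    }
}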