Use of org.hibernate.jdbc.Expectation in project hibernate-orm by hibernate.
From the class AbstractEntityPersister, the method insert.
/**
* Perform an SQL INSERT.
* <p/>
* This form is used for all non-root tables as well as the root table
* in cases where the identifier value is known before the insert occurs.
*/
protected void insert(final Serializable id, final Object[] fields, final boolean[] notNull, final int j, final String sql, final Object object, final SharedSessionContractImplementor session) throws HibernateException {
if (isInverseTable(j)) {
return;
}
// note: it is conceptually possible that a UserType could map null to a non-null value, so the following is arguable:
if (isNullableTable(j) && isAllNull(fields, j)) {
return;
}
if (LOG.isTraceEnabled()) {
LOG.tracev("Inserting entity: {0}", MessageHelper.infoString(this, id, getFactory()));
if (j == 0 && isVersioned()) {
LOG.tracev("Version: {0}", Versioning.getVersion(fields, this));
}
}
// TODO : shouldn't inserts be Expectations.NONE?
final Expectation expectation = Expectations.appropriateExpectation(insertResultCheckStyles[j]);
// we can't batch joined inserts, *especially* not if it is an identity insert;
// nor can we batch statements where the expectation is based on an output param
final boolean useBatch = j == 0 && expectation.canBeBatched();
if (useBatch && inserBatchKey == null) {
inserBatchKey = new BasicBatchKey(getEntityName() + "#INSERT", expectation);
}
final boolean callable = isInsertCallable(j);
try {
// Render the SQL query
final PreparedStatement insert;
if (useBatch) {
insert = session.getJdbcCoordinator().getBatch(inserBatchKey).getBatchStatement(sql, callable);
} else {
insert = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
}
try {
int index = 1;
index += expectation.prepare(insert);
// Write the values of fields onto the prepared statement - we MUST use the state at the time the
// insert was issued (cos of foreign key constraints). Not necessarily the object's current state
dehydrate(id, fields, null, notNull, propertyColumnInsertable, j, insert, session, index, false);
if (useBatch) {
session.getJdbcCoordinator().getBatch(inserBatchKey).addToBatch();
} else {
expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(insert), insert, -1);
}
} catch (SQLException e) {
if (useBatch) {
session.getJdbcCoordinator().abortBatch();
}
throw e;
} finally {
if (!useBatch) {
session.getJdbcCoordinator().getResourceRegistry().release(insert);
session.getJdbcCoordinator().afterStatementExecution();
}
}
} catch (SQLException e) {
throw getFactory().getSQLExceptionHelper().convert(e, "could not insert: " + MessageHelper.infoString(this), sql);
}
}
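The Expectation contract exercised above consists of three methods: canBeBatched(), prepare(PreparedStatement), and verifyOutcome(rowCount, statement, batchPosition). Below is a minimal sketch of a custom implementation, assuming the Hibernate 5.x interface; it is illustrative only and not one of the built-in Expectations.

import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.hibernate.HibernateException;
import org.hibernate.StaleStateException;
import org.hibernate.jdbc.Expectation;

public class SingleRowExpectation implements Expectation {

    @Override
    public boolean canBeBatched() {
        // No output parameter is involved, so statements using this expectation may be batched.
        return true;
    }

    @Override
    public int prepare(PreparedStatement statement) throws SQLException, HibernateException {
        // Nothing to register before binding; return the number of parameter positions
        // consumed so callers can offset their own bind indexes (see "index += expectation.prepare(insert)" above).
        return 0;
    }

    @Override
    public void verifyOutcome(int rowCount, PreparedStatement statement, int batchPosition)
            throws SQLException, HibernateException {
        // Exactly one row must have been affected; anything else suggests the row was
        // concurrently modified or never existed.
        if (rowCount != 1) {
            throw new StaleStateException("Unexpected row count: " + rowCount + " (expected 1)");
        }
    }
}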
Use of org.hibernate.jdbc.Expectation in project hibernate-orm by hibernate.
From the class AbstractEntityPersister, the method delete.
/**
* Perform an SQL DELETE
*/
protected void delete(final Serializable id, final Object version, final int j, final Object object, final String sql, final SharedSessionContractImplementor session, final Object[] loadedState) throws HibernateException {
if (isInverseTable(j)) {
return;
}
final boolean useVersion = j == 0 && isVersioned();
final boolean callable = isDeleteCallable(j);
final Expectation expectation = Expectations.appropriateExpectation(deleteResultCheckStyles[j]);
final boolean useBatch = j == 0 && isBatchable() && expectation.canBeBatched();
if (useBatch && deleteBatchKey == null) {
deleteBatchKey = new BasicBatchKey(getEntityName() + "#DELETE", expectation);
}
final boolean traceEnabled = LOG.isTraceEnabled();
if (traceEnabled) {
LOG.tracev("Deleting entity: {0}", MessageHelper.infoString(this, id, getFactory()));
if (useVersion) {
LOG.tracev("Version: {0}", version);
}
}
if (isTableCascadeDeleteEnabled(j)) {
if (traceEnabled) {
LOG.tracev("Delete handled by foreign key constraint: {0}", getTableName(j));
}
//EARLY EXIT!
return;
}
try {
//Render the SQL query
PreparedStatement delete;
int index = 1;
if (useBatch) {
delete = session.getJdbcCoordinator().getBatch(deleteBatchKey).getBatchStatement(sql, callable);
} else {
delete = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
}
try {
index += expectation.prepare(delete);
// Do the key. The key is immutable so we can use the _current_ object state - not necessarily
// the state at the time the delete was issued
getIdentifierType().nullSafeSet(delete, id, index, session);
index += getIdentifierColumnSpan();
if (useVersion) {
getVersionType().nullSafeSet(delete, version, index, session);
} else if (isAllOrDirtyOptLocking() && loadedState != null) {
boolean[] versionability = getPropertyVersionability();
Type[] types = getPropertyTypes();
for (int i = 0; i < entityMetamodel.getPropertySpan(); i++) {
if (isPropertyOfTable(i, j) && versionability[i]) {
// this property belongs to the table and it is not specifically
// excluded from optimistic locking by optimistic-lock="false"
boolean[] settable = types[i].toColumnNullness(loadedState[i], getFactory());
types[i].nullSafeSet(delete, loadedState[i], index, settable, session);
index += ArrayHelper.countTrue(settable);
}
}
}
if (useBatch) {
session.getJdbcCoordinator().getBatch(deleteBatchKey).addToBatch();
} else {
check(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(delete), id, j, expectation, delete);
}
} catch (SQLException sqle) {
if (useBatch) {
session.getJdbcCoordinator().abortBatch();
}
throw sqle;
} finally {
if (!useBatch) {
session.getJdbcCoordinator().getResourceRegistry().release(delete);
session.getJdbcCoordinator().afterStatementExecution();
}
}
} catch (SQLException sqle) {
throw getFactory().getSQLExceptionHelper().convert(sqle, "could not delete: " + MessageHelper.infoString(this, id, getFactory()), sql);
}
}
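The check style consulted above (deleteResultCheckStyles[j], and insertResultCheckStyles[j] in the previous method) normally comes from custom SQL configured on the mapping. The following is a hedged sketch of such a mapping; the entity, table, and column names are hypothetical, and only the org.hibernate.annotations names are taken from Hibernate itself.

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.ResultCheckStyle;
import org.hibernate.annotations.SQLDelete;

@Entity
@SQLDelete(
        // Soft delete instead of a physical DELETE; the single bind parameter is the identifier.
        sql = "UPDATE document SET deleted = 1 WHERE id = ?",
        // COUNT asks Hibernate to verify the JDBC row count, which surfaces as the
        // Expectation returned by Expectations.appropriateExpectation(...) above.
        check = ResultCheckStyle.COUNT
)
public class Document {

    @Id
    private Long id;

    private String title;

    // getters and setters omitted
}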
Use of org.hibernate.jdbc.Expectation in project hibernate-orm by hibernate.
From the class OneToManyPersister, the method writeIndex.
private void writeIndex(PersistentCollection collection, Iterator entries, Serializable id, boolean resetIndex, SharedSessionContractImplementor session) {
// If one-to-many and inverse, still need to create the index. See HHH-5732.
if (isInverse && hasIndex && !indexContainsFormula && ArrayHelper.countTrue(indexColumnIsSettable) > 0) {
try {
if (entries.hasNext()) {
int nextIndex = resetIndex ? 0 : getSize(id, session);
Expectation expectation = Expectations.appropriateExpectation(getUpdateCheckStyle());
while (entries.hasNext()) {
final Object entry = entries.next();
if (entry != null && collection.entryExists(entry, nextIndex)) {
int offset = 1;
PreparedStatement st = null;
boolean callable = isUpdateCallable();
boolean useBatch = expectation.canBeBatched();
String sql = getSQLUpdateRowString();
if (useBatch) {
if (recreateBatchKey == null) {
recreateBatchKey = new BasicBatchKey(getRole() + "#RECREATE", expectation);
}
st = session.getJdbcCoordinator().getBatch(recreateBatchKey).getBatchStatement(sql, callable);
} else {
st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
}
try {
offset += expectation.prepare(st);
if (hasIdentifier) {
offset = writeIdentifier(st, collection.getIdentifier(entry, nextIndex), offset, session);
}
offset = writeIndex(st, collection.getIndex(entry, nextIndex, this), offset, session);
offset = writeElement(st, collection.getElement(entry), offset, session);
if (useBatch) {
session.getJdbcCoordinator().getBatch(recreateBatchKey).addToBatch();
} else {
expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
}
} catch (SQLException sqle) {
if (useBatch) {
session.getJdbcCoordinator().abortBatch();
}
throw sqle;
} finally {
if (!useBatch) {
session.getJdbcCoordinator().getResourceRegistry().release(st);
session.getJdbcCoordinator().afterStatementExecution();
}
}
}
nextIndex++;
}
}
} catch (SQLException sqle) {
throw sqlExceptionHelper.convert(sqle, "could not update collection: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLUpdateRowString());
}
}
}
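writeIndex, like the other methods in this listing, obtains its Expectation from Expectations.appropriateExpectation(...). Here is a small sketch of that mapping and its effect on batchability, assuming the Hibernate 5.x names ExecuteUpdateResultCheckStyle and Expectations; verify the constants against your version.

import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
import org.hibernate.jdbc.Expectation;
import org.hibernate.jdbc.Expectations;

public class ExpectationMappingSketch {

    public static void main(String[] args) {
        // NONE: no verification at all; batchable.
        Expectation none = Expectations.appropriateExpectation(ExecuteUpdateResultCheckStyle.NONE);
        // COUNT: compares the JDBC update count against the expected row count; batchable.
        Expectation count = Expectations.appropriateExpectation(ExecuteUpdateResultCheckStyle.COUNT);
        // PARAM: reads the row count from a registered output parameter of a callable statement,
        // so it cannot be batched; this is why the persisters above fall back to immediate
        // execution whenever canBeBatched() returns false.
        Expectation param = Expectations.appropriateExpectation(ExecuteUpdateResultCheckStyle.PARAM);

        System.out.println(none.canBeBatched());   // expected: true
        System.out.println(count.canBeBatched());  // expected: true
        System.out.println(param.canBeBatched());  // expected: false
    }
}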
Use of org.hibernate.jdbc.Expectation in project hibernate-orm by hibernate.
From the class AbstractCollectionPersister, the method remove.
@Override
public void remove(Serializable id, SharedSessionContractImplementor session) throws HibernateException {
if (!isInverse && isRowDeleteEnabled()) {
if (LOG.isDebugEnabled()) {
LOG.debugf("Deleting collection: %s", MessageHelper.collectionInfoString(this, id, getFactory()));
}
try {
int offset = 1;
PreparedStatement st = null;
Expectation expectation = Expectations.appropriateExpectation(getDeleteAllCheckStyle());
boolean callable = isDeleteAllCallable();
boolean useBatch = expectation.canBeBatched();
String sql = getSQLDeleteString();
if (useBatch) {
if (removeBatchKey == null) {
removeBatchKey = new BasicBatchKey(getRole() + "#REMOVE", expectation);
}
st = session.getJdbcCoordinator().getBatch(removeBatchKey).getBatchStatement(sql, callable);
} else {
st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
}
try {
offset += expectation.prepare(st);
writeKey(st, id, offset, session);
if (useBatch) {
session.getJdbcCoordinator().getBatch(removeBatchKey).addToBatch();
} else {
expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
}
} catch (SQLException sqle) {
if (useBatch) {
session.getJdbcCoordinator().abortBatch();
}
throw sqle;
} finally {
if (!useBatch) {
session.getJdbcCoordinator().getResourceRegistry().release(st);
session.getJdbcCoordinator().afterStatementExecution();
}
}
LOG.debug("Done deleting collection");
} catch (SQLException sqle) {
throw sqlExceptionHelper.convert(sqle, "could not delete collection: " + MessageHelper.collectionInfoString(this, id, getFactory()), getSQLDeleteString());
}
}
}
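The delete-all check style consulted here (getDeleteAllCheckStyle()) and the per-row style used by deleteRows() below (getDeleteCheckStyle()) can likewise be set through collection-level custom SQL. A hedged sketch follows, with hypothetical table and column names; only the annotation names come from Hibernate.

import java.util.HashSet;
import java.util.Set;

import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.ResultCheckStyle;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.SQLDeleteAll;

@Entity
public class PurchaseOrder {

    @Id
    private Long id;

    @ElementCollection
    // Per-row delete used by deleteRows(): the key column is bound first, then the element,
    // matching writeKey(...) followed by writeElementToWhere(...).
    @SQLDelete(sql = "DELETE FROM order_note WHERE order_id = ? AND note = ?", check = ResultCheckStyle.COUNT)
    // Delete-all used by remove(): only the key column is bound, matching writeKey(...).
    @SQLDeleteAll(sql = "DELETE FROM order_note WHERE order_id = ?", check = ResultCheckStyle.NONE)
    private Set<String> notes = new HashSet<>();
}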
Use of org.hibernate.jdbc.Expectation in project hibernate-orm by hibernate.
From the class AbstractCollectionPersister, the method deleteRows.
@Override
public void deleteRows(PersistentCollection collection, Serializable id, SharedSessionContractImplementor session) throws HibernateException {
if (isInverse) {
return;
}
if (!isRowDeleteEnabled()) {
return;
}
if (LOG.isDebugEnabled()) {
LOG.debugf("Deleting rows of collection: %s", MessageHelper.collectionInfoString(this, collection, id, session));
}
boolean deleteByIndex = !isOneToMany() && hasIndex && !indexContainsFormula;
final Expectation expectation = Expectations.appropriateExpectation(getDeleteCheckStyle());
try {
// delete all the deleted entries
Iterator deletes = collection.getDeletes(this, !deleteByIndex);
if (deletes.hasNext()) {
int offset = 1;
int count = 0;
while (deletes.hasNext()) {
PreparedStatement st = null;
boolean callable = isDeleteCallable();
boolean useBatch = expectation.canBeBatched();
String sql = getSQLDeleteRowString();
if (useBatch) {
if (deleteBatchKey == null) {
deleteBatchKey = new BasicBatchKey(getRole() + "#DELETE", expectation);
}
st = session.getJdbcCoordinator().getBatch(deleteBatchKey).getBatchStatement(sql, callable);
} else {
st = session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
}
try {
expectation.prepare(st);
Object entry = deletes.next();
int loc = offset;
if (hasIdentifier) {
writeIdentifier(st, entry, loc, session);
} else {
loc = writeKey(st, id, loc, session);
if (deleteByIndex) {
writeIndexToWhere(st, entry, loc, session);
} else {
writeElementToWhere(st, entry, loc, session);
}
}
if (useBatch) {
session.getJdbcCoordinator().getBatch(deleteBatchKey).addToBatch();
} else {
expectation.verifyOutcome(session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
}
count++;
} catch (SQLException sqle) {
if (useBatch) {
session.getJdbcCoordinator().abortBatch();
}
throw sqle;
} finally {
if (!useBatch) {
session.getJdbcCoordinator().getResourceRegistry().release(st);
session.getJdbcCoordinator().afterStatementExecution();
}
}
LOG.debugf("Done deleting collection rows: %s deleted", count);
}
} else {
LOG.debug("No rows to delete");
}
} catch (SQLException sqle) {
throw sqlExceptionHelper.convert(sqle, "could not delete collection rows: " + MessageHelper.collectionInfoString(this, collection, id, session), getSQLDeleteRowString());
}
}
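Every method in this listing repeats the same shape: let the Expectation prepare the statement, bind the parameters, then either add to a JDBC batch or execute immediately and verify the outcome, and release non-batched statements afterwards. The helper below is hypothetical (it is not part of Hibernate) and merely distills that pattern; the class name and the Binder callback are assumptions, while every session call mirrors the code above.

import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.hibernate.engine.jdbc.batch.spi.BatchKey;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.jdbc.Expectation;

public final class ExpectationExecutionSketch {

    /** Callback that binds the statement parameters, starting at the given offset. */
    public interface Binder {
        void bind(PreparedStatement statement, int offset) throws SQLException;
    }

    public static void execute(
            SharedSessionContractImplementor session,
            BatchKey batchKey,
            String sql,
            boolean callable,
            Expectation expectation,
            Binder binder) throws SQLException {
        final boolean useBatch = expectation.canBeBatched();
        final PreparedStatement st = useBatch
                ? session.getJdbcCoordinator().getBatch(batchKey).getBatchStatement(sql, callable)
                : session.getJdbcCoordinator().getStatementPreparer().prepareStatement(sql, callable);
        try {
            // Let the expectation claim leading parameter positions (e.g. an output parameter).
            final int offset = 1 + expectation.prepare(st);
            binder.bind(st, offset);
            if (useBatch) {
                session.getJdbcCoordinator().getBatch(batchKey).addToBatch();
            }
            else {
                expectation.verifyOutcome(
                        session.getJdbcCoordinator().getResultSetReturn().executeUpdate(st), st, -1);
            }
        }
        catch (SQLException e) {
            if (useBatch) {
                session.getJdbcCoordinator().abortBatch();
            }
            throw e;
        }
        finally {
            if (!useBatch) {
                session.getJdbcCoordinator().getResourceRegistry().release(st);
                session.getJdbcCoordinator().afterStatementExecution();
            }
        }
    }

    private ExpectationExecutionSketch() {
    }
}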