Use of org.datanucleus.exceptions.NucleusException in project datanucleus-rdbms by datanucleus.
The class QueryToSQLMapper, method processPrimaryExpression.
/* (non-Javadoc)
* @see org.datanucleus.query.evaluator.AbstractExpressionEvaluator#processPrimaryExpression(org.datanucleus.query.expression.PrimaryExpression)
*/
protected Object processPrimaryExpression(PrimaryExpression expr) {
SQLExpression sqlExpr = null;
if (expr.getLeft() != null) {
if (expr.getLeft() instanceof DyadicExpression && expr.getLeft().getOperator() == Expression.OP_CAST) {
String exprCastName = null;
if (expr.getLeft().getLeft() instanceof PrimaryExpression) {
exprCastName = "CAST_" + ((PrimaryExpression) expr.getLeft().getLeft()).getId();
} else if (expr.getLeft().getLeft() instanceof VariableExpression) {
exprCastName = "CAST_" + ((VariableExpression) expr.getLeft().getLeft()).getId();
} else if (expr.getLeft().getLeft() instanceof InvokeExpression) {
exprCastName = "CAST_" + expr.getLeft().getLeft();
} else {
throw new NucleusException("Don't currently support cast of " + expr.getLeft().getLeft());
}
expr.getLeft().getLeft().evaluate(this);
sqlExpr = stack.pop();
JavaTypeMapping mapping = sqlExpr.getJavaTypeMapping();
if (mapping instanceof EmbeddedMapping) {
// Cast of an embedded field, so use same table
// Extract what we are casting it to
Literal castLitExpr = (Literal) expr.getLeft().getRight();
Class castType = resolveClass((String) castLitExpr.getLiteral());
AbstractClassMetaData castCmd = ec.getMetaDataManager().getMetaDataForClass(castType, clr);
JavaTypeMapping discMapping = ((EmbeddedMapping) mapping).getDiscriminatorMapping();
if (discMapping != null) {
// Should have a discriminator always when casting this
SQLExpression discExpr = exprFactory.newExpression(stmt, sqlExpr.getSQLTable(), discMapping);
Object discVal = castCmd.getDiscriminatorValue();
SQLExpression discValExpr = exprFactory.newLiteral(stmt, discMapping, discVal);
BooleanExpression discRestrictExpr = discExpr.eq(discValExpr);
Iterator<String> subclassIter = storeMgr.getSubClassesForClass(castType.getName(), true, clr).iterator();
while (subclassIter.hasNext()) {
String subclassName = subclassIter.next();
AbstractClassMetaData subtypeCmd = storeMgr.getMetaDataManager().getMetaDataForClass(subclassName, clr);
discVal = subtypeCmd.getDiscriminatorValue();
discValExpr = exprFactory.newLiteral(stmt, discMapping, discVal);
BooleanExpression subtypeExpr = discExpr.eq(discValExpr);
discRestrictExpr = discRestrictExpr.ior(subtypeExpr);
}
stmt.whereAnd(discRestrictExpr, true);
}
SQLTableMapping tblMapping = new SQLTableMapping(sqlExpr.getSQLTable(), castCmd, sqlExpr.getJavaTypeMapping());
setSQLTableMappingForAlias(exprCastName, tblMapping);
SQLTableMapping sqlMapping = getSQLTableMappingForPrimaryExpression(stmt, exprCastName, expr, Boolean.FALSE);
if (sqlMapping == null) {
throw new NucleusException("PrimaryExpression " + expr + " is not yet supported");
}
sqlExpr = exprFactory.newExpression(stmt, sqlMapping.table, sqlMapping.mapping);
stack.push(sqlExpr);
return sqlExpr;
}
// Evaluate the cast
expr.getLeft().evaluate(this);
sqlExpr = stack.pop();
// Extract what we are casting it to
Literal castLitExpr = (Literal) expr.getLeft().getRight();
AbstractClassMetaData castCmd = ec.getMetaDataManager().getMetaDataForClass(resolveClass((String) castLitExpr.getLiteral()), clr);
SQLTableMapping tblMapping = new SQLTableMapping(sqlExpr.getSQLTable(), castCmd, sqlExpr.getJavaTypeMapping());
setSQLTableMappingForAlias(exprCastName, tblMapping);
SQLTableMapping sqlMapping = getSQLTableMappingForPrimaryExpression(stmt, exprCastName, expr, Boolean.FALSE);
if (sqlMapping == null) {
throw new NucleusException("PrimaryExpression " + expr + " is not yet supported");
}
sqlExpr = exprFactory.newExpression(stmt, sqlMapping.table, sqlMapping.mapping);
stack.push(sqlExpr);
return sqlExpr;
} else if (expr.getLeft() instanceof ParameterExpression) {
// "{paramExpr}.field[.field[.field]]"
// Need parameter values to process this
setNotPrecompilable();
ParameterExpression paramExpr = (ParameterExpression) expr.getLeft();
Symbol paramSym = compilation.getSymbolTable().getSymbol(paramExpr.getId());
if (paramSym.getValueType() != null && paramSym.getValueType().isArray()) {
// Special case : array "methods" (particularly "length")
String first = expr.getTuples().get(0);
processParameterExpression(paramExpr, true);
SQLExpression paramSqlExpr = stack.pop();
sqlExpr = exprFactory.invokeMethod(stmt, "ARRAY", first, paramSqlExpr, null);
stack.push(sqlExpr);
return sqlExpr;
}
// Create Literal for the parameter (since we need to perform operations on it)
processParameterExpression(paramExpr, true);
SQLExpression paramSqlExpr = stack.pop();
SQLLiteral lit = (SQLLiteral) paramSqlExpr;
Object paramValue = lit.getValue();
List<String> tuples = expr.getTuples();
Iterator<String> tuplesIter = tuples.iterator();
Object objValue = paramValue;
while (tuplesIter.hasNext()) {
String fieldName = tuplesIter.next();
if (objValue == null) {
NucleusLogger.QUERY.warn(">> Compilation of " + expr + " : need to direct through field \"" + fieldName + "\" on null value, hence not compilable!");
// Null value, and we have further path to navigate TODO Handle this "NPE"
break;
}
objValue = getValueForObjectField(objValue, fieldName);
// Using literal value of parameter, so cannot precompile it
setNotPrecompilable();
}
if (objValue == null) {
sqlExpr = exprFactory.newLiteral(stmt, null, null);
stack.push(sqlExpr);
return sqlExpr;
}
JavaTypeMapping m = exprFactory.getMappingForType(objValue.getClass(), false);
sqlExpr = exprFactory.newLiteral(stmt, m, objValue);
stack.push(sqlExpr);
return sqlExpr;
} else if (expr.getLeft() instanceof VariableExpression) {
// "{varExpr}.field[.field[.field]]"
VariableExpression varExpr = (VariableExpression) expr.getLeft();
processVariableExpression(varExpr);
SQLExpression varSqlExpr = stack.pop();
if (varSqlExpr instanceof UnboundExpression) {
// Bind as CROSS JOIN for now
processUnboundExpression((UnboundExpression) varSqlExpr);
varSqlExpr = stack.pop();
}
Class varType = clr.classForName(varSqlExpr.getJavaTypeMapping().getType());
if (varSqlExpr.getSQLStatement() == stmt.getParentStatement()) {
// Use parent mapper to get the mapping for this field since it has the table
SQLTableMapping sqlMapping = parentMapper.getSQLTableMappingForPrimaryExpression(stmt, null, expr, Boolean.FALSE);
if (sqlMapping == null) {
throw new NucleusException("PrimaryExpression " + expr.getId() + " is not yet supported");
}
// TODO Cater for the table required to join to not being the primary table of the outer query
// This should check on getDatastoreAdapter().supportsOption(RDBMSAdapter.ACCESS_PARENTQUERY_IN_SUBQUERY)
sqlExpr = exprFactory.newExpression(varSqlExpr.getSQLStatement(), sqlMapping.table, sqlMapping.mapping);
stack.push(sqlExpr);
return sqlExpr;
}
SQLTableMapping varTblMapping = getSQLTableMappingForAlias(varExpr.getId());
if (varTblMapping == null) {
throw new NucleusUserException("Variable " + varExpr.getId() + " is not yet bound, so cannot get field " + expr.getId());
}
if (varTblMapping.cmd == null) {
throw new NucleusUserException("Variable " + varExpr.getId() + " of type " + varType.getName() + " cannot evaluate " + expr.getId());
}
SQLTableMapping sqlMapping = getSQLTableMappingForPrimaryExpression(varSqlExpr.getSQLStatement(), varExpr.getId(), expr, Boolean.FALSE);
sqlExpr = exprFactory.newExpression(sqlMapping.table.getSQLStatement(), sqlMapping.table, sqlMapping.mapping);
stack.push(sqlExpr);
return sqlExpr;
} else if (expr.getLeft() instanceof InvokeExpression) {
InvokeExpression invokeExpr = (InvokeExpression) expr.getLeft();
SQLExpression invokedSqlExpr = getInvokedSqlExpressionForInvokeExpression(invokeExpr);
processInvokeExpression(invokeExpr, invokedSqlExpr);
SQLExpression invokeSqlExpr = stack.pop();
Table tbl = invokeSqlExpr.getSQLTable().getTable();
if (expr.getTuples().size() > 1) {
throw new NucleusUserException("Dont currently support evaluating " + expr.getId() + " on " + invokeSqlExpr);
}
SQLTable invokeSqlTbl = invokeSqlExpr.getSQLTable();
if (invokedSqlExpr.getJavaTypeMapping() instanceof OptionalMapping && invokeExpr.getOperation().equals("get") && expr.getTuples().size() == 1) {
OptionalMapping opMapping = (OptionalMapping) invokedSqlExpr.getJavaTypeMapping();
if (opMapping.getWrappedMapping() instanceof PersistableMapping) {
// Special case of Optional.get().{field}, so we need to join to the related table
AbstractMemberMetaData mmd = invokedSqlExpr.getJavaTypeMapping().getMemberMetaData();
AbstractClassMetaData otherCmd = ec.getMetaDataManager().getMetaDataForClass(mmd.getCollection().getElementType(), clr);
Table otherTbl = storeMgr.getDatastoreClass(otherCmd.getFullClassName(), clr);
// Optional type so do LEFT OUTER JOIN since if it is null then we would eliminate all other results
invokeSqlTbl = stmt.join(JoinType.LEFT_OUTER_JOIN, invokeSqlExpr.getSQLTable(), opMapping.getWrappedMapping(), otherTbl, null, otherTbl.getIdMapping(), null, null, true);
tbl = invokeSqlTbl.getTable();
}
}
if (tbl instanceof DatastoreClass) {
// Table of a class, so assume to have field in the table of the class
// TODO Allow joins to superclasses if required
JavaTypeMapping mapping = ((DatastoreClass) tbl).getMemberMapping(expr.getId());
if (mapping == null) {
throw new NucleusUserException("Dont currently support evaluating " + expr.getId() + " on " + invokeSqlExpr + ". The field " + expr.getId() + " doesnt exist in table " + tbl);
}
sqlExpr = exprFactory.newExpression(stmt, invokeSqlTbl, mapping);
stack.push(sqlExpr);
return sqlExpr;
} else if (tbl instanceof JoinTable) {
if (invokeSqlExpr.getJavaTypeMapping() instanceof EmbeddedMapping) {
// Table containing an embedded element/key/value so assume we have a column in the join table
EmbeddedMapping embMapping = (EmbeddedMapping) invokeSqlExpr.getJavaTypeMapping();
JavaTypeMapping mapping = embMapping.getJavaTypeMapping(expr.getId());
if (mapping == null) {
throw new NucleusUserException("Dont currently support evaluating " + expr.getId() + " on " + invokeSqlExpr + ". The field " + expr.getId() + " doesnt exist in table " + tbl);
}
sqlExpr = exprFactory.newExpression(stmt, invokeSqlTbl, mapping);
stack.push(sqlExpr);
return sqlExpr;
}
}
throw new NucleusUserException("Dont currently support evaluating " + expr.getId() + " on " + invokeSqlExpr + " with invoke having table of " + tbl);
} else {
throw new NucleusUserException("Dont currently support PrimaryExpression with 'left' of " + expr.getLeft());
}
}
// Real primary expression ("field.field", "alias.field.field" etc)
SQLTableMapping sqlMapping = getSQLTableMappingForPrimaryExpression(stmt, null, expr, null);
if (sqlMapping == null) {
throw new NucleusException("PrimaryExpression " + expr.getId() + " is not yet supported");
}
sqlExpr = exprFactory.newExpression(stmt, sqlMapping.table, sqlMapping.mapping);
if (sqlMapping.mmd != null && sqlExpr instanceof MapExpression) {
// This sqlMapping is for something joined in a FROM clause, so set the alias on the returned MapExpression to avoid doing the same joins
String alias = getAliasForSQLTableMapping(sqlMapping);
if (alias == null && parentMapper != null) {
alias = parentMapper.getAliasForSQLTableMapping(sqlMapping);
}
((MapExpression) sqlExpr).setAliasForMapTable(alias);
}
stack.push(sqlExpr);
return sqlExpr;
}
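When QueryToSQLMapper cannot map a PrimaryExpression, the NucleusException thrown above propagates out of query compilation; for JDO callers DataNucleus typically rewraps it as a JDOException. A minimal, hypothetical handling sketch (the helper class and query string are illustrative, not part of the source):

import javax.jdo.JDOException;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;

public class QueryCompileSketch {
    // Hypothetical helper: execute a JDOQL query and surface compilation problems.
    public static Object run(PersistenceManager pm, String jdoql) {
        Query q = pm.newQuery(jdoql);
        try {
            return q.execute();
        } catch (JDOException e) {
            // e.getCause() may carry the underlying org.datanucleus.exceptions.NucleusException
            System.err.println("Query could not be compiled/executed: " + e.getMessage());
            throw e;
        } finally {
            q.closeAll();
        }
    }
}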
Use of org.datanucleus.exceptions.NucleusException in project datanucleus-rdbms by datanucleus.
The class LocateBulkRequest, method getStatement.
protected String getStatement(DatastoreClass table, DNStateManager[] sms, boolean lock) {
RDBMSStoreManager storeMgr = table.getStoreManager();
ClassLoaderResolver clr = storeMgr.getNucleusContext().getClassLoaderResolver(null);
SQLExpressionFactory exprFactory = storeMgr.getSQLExpressionFactory();
cmd = storeMgr.getMetaDataManager().getMetaDataForClass(table.getType(), clr);
ExecutionContext ec = sms[0].getExecutionContext();
SelectStatement sqlStatement = new SelectStatement(storeMgr, table, null, null);
// SELECT fields we require
resultMapping = new StatementClassMapping();
// a). PK fields
if (table.getIdentityType() == IdentityType.DATASTORE) {
JavaTypeMapping datastoreIdMapping = table.getSurrogateMapping(SurrogateColumnType.DATASTORE_ID, false);
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), datastoreIdMapping);
int[] cols = sqlStatement.select(expr, null);
StatementMappingIndex datastoreIdx = new StatementMappingIndex(datastoreIdMapping);
datastoreIdx.setColumnPositions(cols);
resultMapping.addMappingForMember(SurrogateColumnType.DATASTORE_ID.getFieldNumber(), datastoreIdx);
} else if (table.getIdentityType() == IdentityType.APPLICATION) {
int[] pkNums = cmd.getPKMemberPositions();
for (int i = 0; i < pkNums.length; i++) {
AbstractMemberMetaData mmd = cmd.getMetaDataForManagedMemberAtAbsolutePosition(pkNums[i]);
JavaTypeMapping pkMapping = table.getMemberMappingInDatastoreClass(mmd);
if (pkMapping == null) {
pkMapping = table.getMemberMapping(mmd);
}
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), pkMapping);
int[] cols = sqlStatement.select(expr, null);
StatementMappingIndex pkIdx = new StatementMappingIndex(pkMapping);
pkIdx.setColumnPositions(cols);
resultMapping.addMappingForMember(mmd.getAbsoluteFieldNumber(), pkIdx);
}
} else {
throw new NucleusUserException("Cannot locate objects using nondurable identity");
}
JavaTypeMapping verMapping = table.getSurrogateMapping(SurrogateColumnType.VERSION, false);
if (verMapping != null) {
VersionMetaData currentVermd = table.getVersionMetaData();
if (currentVermd != null && currentVermd.getMemberName() == null) {
// Surrogate version column
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), verMapping);
int[] cols = sqlStatement.select(expr, null);
StatementMappingIndex mapIdx = new StatementMappingIndex(verMapping);
mapIdx.setColumnPositions(cols);
resultMapping.addMappingForMember(SurrogateColumnType.VERSION.getFieldNumber(), mapIdx);
}
}
int[] nonPkFieldNums = cmd.getNonPKMemberPositions();
if (nonPkFieldNums != null) {
for (int i = 0; i < nonPkFieldNums.length; i++) {
AbstractMemberMetaData mmd = cmd.getMetaDataForManagedMemberAtAbsolutePosition(nonPkFieldNums[i]);
JavaTypeMapping mapping = table.getMemberMapping(mmd);
if (mapping != null && mapping.includeInFetchStatement()) {
if (mapping instanceof PersistableMapping) {
// Ignore 1-1/N-1 for now
continue;
}
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), mapping);
int[] cols = sqlStatement.select(expr, null);
StatementMappingIndex mapIdx = new StatementMappingIndex(mapping);
mapIdx.setColumnPositions(cols);
resultMapping.addMappingForMember(mmd.getAbsoluteFieldNumber(), mapIdx);
}
}
}
JavaTypeMapping multitenancyMapping = table.getSurrogateMapping(SurrogateColumnType.MULTITENANCY, false);
if (multitenancyMapping != null) {
// Add WHERE clause for multi-tenancy
SQLExpression tenantExpr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), multitenancyMapping);
String[] tenantReadIds = storeMgr.getNucleusContext().getTenantReadIds(sms[0].getExecutionContext());
if (tenantReadIds != null && tenantReadIds.length > 0) {
// Add IN clause with values
SQLExpression[] readIdExprs = new SQLExpression[tenantReadIds.length];
for (int i = 0; i < tenantReadIds.length; i++) {
readIdExprs[i] = sqlStatement.getSQLExpressionFactory().newLiteral(sqlStatement, multitenancyMapping, tenantReadIds[i].trim());
}
sqlStatement.whereAnd(new InExpression(tenantExpr, readIdExprs), true);
} else {
// Add EQ expression for tenantId TODO Use a parameter for this and set in execution
SQLExpression tenantVal = exprFactory.newLiteral(sqlStatement, multitenancyMapping, ec.getTenantId());
sqlStatement.whereAnd(tenantExpr.eq(tenantVal), true);
}
}
JavaTypeMapping softDeleteMapping = table.getSurrogateMapping(SurrogateColumnType.SOFTDELETE, false);
if (softDeleteMapping != null) {
// Add WHERE clause restricting to soft-delete unset
SQLExpression softDeleteExpr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), softDeleteMapping);
SQLExpression softDeleteVal = exprFactory.newLiteral(sqlStatement, softDeleteMapping, Boolean.FALSE);
sqlStatement.whereAnd(softDeleteExpr.eq(softDeleteVal), true);
}
// Add WHERE clause restricting to the identities of the objects
mappingDefinitions = new StatementClassMapping[sms.length];
int inputParamNum = 1;
for (int i = 0; i < sms.length; i++) {
mappingDefinitions[i] = new StatementClassMapping();
if (table.getIdentityType() == IdentityType.DATASTORE) {
// Datastore identity value for input
JavaTypeMapping datastoreIdMapping = table.getSurrogateMapping(SurrogateColumnType.DATASTORE_ID, false);
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), datastoreIdMapping);
SQLExpression val = exprFactory.newLiteralParameter(sqlStatement, datastoreIdMapping, null, "ID");
sqlStatement.whereOr(expr.eq(val), true);
StatementMappingIndex datastoreIdx = new StatementMappingIndex(datastoreIdMapping);
mappingDefinitions[i].addMappingForMember(SurrogateColumnType.DATASTORE_ID.getFieldNumber(), datastoreIdx);
datastoreIdx.addParameterOccurrence(new int[] { inputParamNum++ });
} else if (table.getIdentityType() == IdentityType.APPLICATION) {
// Application identity value(s) for input
BooleanExpression pkExpr = null;
int[] pkNums = cmd.getPKMemberPositions();
for (int j = 0; j < pkNums.length; j++) {
AbstractMemberMetaData mmd = cmd.getMetaDataForManagedMemberAtAbsolutePosition(pkNums[j]);
JavaTypeMapping pkMapping = table.getMemberMappingInDatastoreClass(mmd);
if (pkMapping == null) {
pkMapping = table.getMemberMapping(mmd);
}
SQLExpression expr = exprFactory.newExpression(sqlStatement, sqlStatement.getPrimaryTable(), pkMapping);
SQLExpression val = exprFactory.newLiteralParameter(sqlStatement, pkMapping, null, "PK" + j);
BooleanExpression fieldEqExpr = expr.eq(val);
if (pkExpr == null) {
pkExpr = fieldEqExpr;
} else {
pkExpr = pkExpr.and(fieldEqExpr);
}
StatementMappingIndex pkIdx = new StatementMappingIndex(pkMapping);
mappingDefinitions[i].addMappingForMember(mmd.getAbsoluteFieldNumber(), pkIdx);
int[] inputParams = new int[pkMapping.getNumberOfColumnMappings()];
for (int k = 0; k < pkMapping.getNumberOfColumnMappings(); k++) {
inputParams[k] = inputParamNum++;
}
pkIdx.addParameterOccurrence(inputParams);
}
if (pkExpr == null) {
throw new NucleusException("Unable to generate PK expression for WHERE clause of locate statement");
}
pkExpr = (BooleanExpression) pkExpr.encloseInParentheses();
sqlStatement.whereOr(pkExpr, true);
}
}
// Generate the appropriate JDBC statement allowing for locking
if (lock) {
sqlStatement.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, Boolean.TRUE);
}
return sqlStatement.getSQLText().toSQL();
}
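The loop over the supplied StateManagers ORs one identity clause per object into the WHERE clause. As an illustration only (table and column names are hypothetical; the real ones come from the mappings), the statement produced for two objects of a class with a two-column application identity has roughly this shape:

public class LocateStatementShape {
    // Illustrative only: approximate SQL produced by getStatement(...) for two
    // candidate objects of a class with a composite application identity.
    static final String EXAMPLE_SQL =
        "SELECT A0.PK1, A0.PK2, A0.VERSION, A0.NAME FROM PERSON A0" +
        " WHERE (A0.PK1 = ? AND A0.PK2 = ?) OR (A0.PK1 = ? AND A0.PK2 = ?)";
}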
Use of org.datanucleus.exceptions.NucleusException in project datanucleus-rdbms by datanucleus.
The class RDBMSSchemaHandler, method deleteDatabase.
/* (non-Javadoc)
* @see org.datanucleus.store.schema.AbstractStoreSchemaHandler#deleteDatabase(java.lang.String, java.lang.String, java.util.Properties, java.lang.Object)
*/
@Override
public void deleteDatabase(String catalogName, String schemaName, Properties props, Object connection) {
try {
String stmtText = getDatastoreAdapter().getDropDatabaseStatement(catalogName, schemaName);
ManagedConnection mconn = null;
Connection conn = (Connection) connection;
if (connection == null) {
mconn = storeMgr.getConnectionManager().getConnection(TransactionIsolation.NONE);
conn = (Connection) mconn.getConnection();
}
Statement stmt = null;
try {
stmt = conn.createStatement();
NucleusLogger.DATASTORE_SCHEMA.debug(stmtText);
boolean success = stmt.execute(stmtText);
NucleusLogger.DATASTORE_SCHEMA.debug("deleteDatabase returned " + success);
} catch (SQLException sqle) {
NucleusLogger.DATASTORE_SCHEMA.error("Exception thrown deleting database cat=" + catalogName + " sch=" + schemaName, sqle);
throw new NucleusException("Exception thrown in deleteDatabase. See the log for full details : " + sqle.getMessage());
} finally {
if (stmt != null) {
try {
stmt.close();
} catch (SQLException sqle) {
}
}
if (mconn != null) {
mconn.release();
}
}
} catch (UnsupportedOperationException uoe) {
return;
}
}
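The manual Statement cleanup in the finally block above can also be expressed with try-with-resources without changing behaviour; a minimal sketch using plain JDBC (the helper class is illustrative):

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

class DropDatabaseSketch {
    // Equivalent cleanup via try-with-resources; stmtText would be the
    // adapter-provided DROP statement, conn the JDBC connection in use.
    static void execute(Connection conn, String stmtText) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            boolean success = stmt.execute(stmtText);
            System.out.println("deleteDatabase returned " + success);
        }
    }
}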
Use of org.datanucleus.exceptions.NucleusException in project datanucleus-rdbms by datanucleus.
The class DeleteRequest, method execute.
/**
* Method performing the deletion of the record from the datastore.
* Takes the constructed deletion query and populates with the specific record information.
* @param sm StateManager for the record to be deleted.
*/
public void execute(DNStateManager sm) {
if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
// Debug information about what we are deleting
NucleusLogger.PERSISTENCE.debug(Localiser.msg("052210", IdentityUtils.getPersistableIdentityForId(sm.getInternalObjectId()), table));
}
// Process all related fields first
// a). Delete any dependent objects
// b). Null any non-dependent objects with FK at other side
ClassLoaderResolver clr = sm.getExecutionContext().getClassLoaderResolver();
Set relatedObjectsToDelete = null;
if (mappingCallbacks != null) {
for (MappingCallbacks m : mappingCallbacks) {
if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
NucleusLogger.PERSISTENCE.debug(Localiser.msg("052212", IdentityUtils.getPersistableIdentityForId(sm.getInternalObjectId()), ((JavaTypeMapping) m).getMemberMetaData().getFullFieldName()));
}
m.preDelete(sm);
// Check for any dependent related 1-1 objects where we hold the FK and where the object hasn't been deleted.
// This can happen if this DeleteRequest was triggered by delete-orphans and so the related object has to be deleted *after* this object.
// It's likely we could do this better by using AttachFieldManager and just marking the "orphan" (i.e this object) as deleted
// (see AttachFieldManager TODO regarding when not copying)
JavaTypeMapping mapping = (JavaTypeMapping) m;
AbstractMemberMetaData mmd = mapping.getMemberMetaData();
RelationType relationType = mmd.getRelationType(clr);
if (mmd.isDependent() && (relationType == RelationType.ONE_TO_ONE_UNI || (relationType == RelationType.ONE_TO_ONE_BI && mmd.getMappedBy() == null))) {
try {
sm.isLoaded(mmd.getAbsoluteFieldNumber());
Object relatedPc = sm.provideField(mmd.getAbsoluteFieldNumber());
boolean relatedObjectDeleted = sm.getExecutionContext().getApiAdapter().isDeleted(relatedPc);
if (!relatedObjectDeleted) {
if (relatedObjectsToDelete == null) {
relatedObjectsToDelete = new HashSet();
}
relatedObjectsToDelete.add(relatedPc);
}
} catch (Exception e) {
// Should be XXXObjectNotFoundException but don't want to use the JDO class
}
}
}
}
// and cater for other cases, in particular persistent interfaces
if (oneToOneNonOwnerFields != null && oneToOneNonOwnerFields.length > 0) {
for (int i = 0; i < oneToOneNonOwnerFields.length; i++) {
updateOneToOneBidirectionalOwnerObjectForField(sm, oneToOneNonOwnerFields[i]);
}
}
// Choose the statement based on whether optimistic or not
String stmt = null;
ExecutionContext ec = sm.getExecutionContext();
RDBMSStoreManager storeMgr = table.getStoreManager();
boolean optimisticChecks = false;
if (table.getSurrogateColumn(SurrogateColumnType.SOFTDELETE) != null) {
stmt = softDeleteStmt;
} else {
optimisticChecks = (versionChecks && ec.getTransaction().getOptimistic());
stmt = optimisticChecks ? deleteStmtOptimistic : deleteStmt;
}
// Process the delete of this object
try {
ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
SQLController sqlControl = storeMgr.getSQLController();
try {
// Perform the delete
boolean batch = true;
if (optimisticChecks || !ec.getTransaction().isActive()) {
// Turn OFF batching if doing optimistic checks (since we need the result of the delete)
// or if using nontransactional writes (since we want it sending to the datastore now)
batch = false;
}
PreparedStatement ps = sqlControl.getStatementForUpdate(mconn, stmt, batch);
try {
// provide WHERE clause field(s)
if (cmd.getIdentityType() == IdentityType.DATASTORE) {
StatementMappingIndex mapIdx = mappingStatementIndex.getWhereDatastoreId();
for (int i = 0; i < mapIdx.getNumberOfParameterOccurrences(); i++) {
table.getSurrogateMapping(SurrogateColumnType.DATASTORE_ID, false).setObject(ec, ps, mapIdx.getParameterPositionsForOccurrence(i), sm.getInternalObjectId());
}
} else {
StatementClassMapping mappingDefinition = new StatementClassMapping();
StatementMappingIndex[] idxs = mappingStatementIndex.getWhereFields();
for (int i = 0; i < idxs.length; i++) {
if (idxs[i] != null) {
mappingDefinition.addMappingForMember(i, idxs[i]);
}
}
sm.provideFields(whereFieldNumbers, new ParameterSetter(sm, ps, mappingDefinition));
}
if (multitenancyStatementMapping != null) {
table.getSurrogateMapping(SurrogateColumnType.MULTITENANCY, false).setObject(ec, ps, multitenancyStatementMapping.getParameterPositionsForOccurrence(0), ec.getTenantId());
}
if (optimisticChecks) {
// WHERE clause - current version discriminator
JavaTypeMapping verMapping = mappingStatementIndex.getWhereVersion().getMapping();
Object currentVersion = sm.getTransactionalVersion();
if (currentVersion == null) {
// Somehow the version is not set on this object (not read in ?) so report the bug
String msg = Localiser.msg("052202", IdentityUtils.getPersistableIdentityForId(sm.getInternalObjectId()), table);
NucleusLogger.PERSISTENCE.error(msg);
throw new NucleusException(msg);
}
StatementMappingIndex mapIdx = mappingStatementIndex.getWhereVersion();
for (int i = 0; i < mapIdx.getNumberOfParameterOccurrences(); i++) {
verMapping.setObject(ec, ps, mapIdx.getParameterPositionsForOccurrence(i), currentVersion);
}
}
int[] rcs = sqlControl.executeStatementUpdate(ec, mconn, stmt, ps, !batch);
if (optimisticChecks && rcs[0] == 0) {
// No object deleted so either object disappeared or failed optimistic version checks
throw new NucleusOptimisticException(Localiser.msg("052203", IdentityUtils.getPersistableIdentityForId(sm.getInternalObjectId()), "" + sm.getTransactionalVersion()), sm.getObject());
}
if (relatedObjectsToDelete != null && !relatedObjectsToDelete.isEmpty()) {
// Delete any related objects that need deleting after the delete of this object
Iterator iter = relatedObjectsToDelete.iterator();
while (iter.hasNext()) {
Object relatedObject = iter.next();
ec.deleteObjectInternal(relatedObject);
}
}
} finally {
sqlControl.closeStatement(mconn, ps);
}
} finally {
mconn.release();
}
} catch (SQLException e) {
String msg = Localiser.msg("052211", IdentityUtils.getPersistableIdentityForId(sm.getInternalObjectId()), stmt, e.getMessage());
NucleusLogger.PERSISTENCE.warn(msg);
List exceptions = new ArrayList();
exceptions.add(e);
while ((e = e.getNextException()) != null) {
exceptions.add(e);
}
throw new NucleusDataStoreException(msg, (Throwable[]) exceptions.toArray(new Throwable[exceptions.size()]));
}
}
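When the optimistic check above fails, the NucleusOptimisticException is typically surfaced to JDO callers as a javax.jdo.JDOOptimisticVerificationException. A hypothetical caller-side sketch (class and method names are illustrative):

import javax.jdo.JDOOptimisticVerificationException;
import javax.jdo.PersistenceManager;
import javax.jdo.Transaction;

class DeleteWithVersionCheck {
    // Hypothetical handling of a failed optimistic delete on the caller side.
    static void deleteChecked(PersistenceManager pm, Object obj) {
        Transaction tx = pm.currentTransaction();
        tx.setOptimistic(true);
        tx.begin();
        try {
            pm.deletePersistent(obj);
            tx.commit();
        } catch (JDOOptimisticVerificationException e) {
            // The object was changed or removed by another transaction since it was read
            System.err.println("Optimistic delete failed: " + e.getMessage());
        } finally {
            if (tx.isActive()) {
                tx.rollback();
            }
        }
    }
}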
Use of org.datanucleus.exceptions.NucleusException in project datanucleus-rdbms by datanucleus.
The class JPQLQuery, method compileQueryFull.
/**
* Method to set the (native) query statement for the compiled query as a whole.
* The "table groups" in the resultant SQLStatement will be named as per the candidate alias,
* and thereafter "{alias}.{fieldName}".
* @param parameters Input parameters (if known)
* @param candidateCmd Metadata for the candidate class
*/
private void compileQueryFull(Map parameters, AbstractClassMetaData candidateCmd) {
if (type != QueryType.SELECT) {
return;
}
if (candidateCollection != null) {
return;
}
long startTime = 0;
if (NucleusLogger.QUERY.isDebugEnabled()) {
startTime = System.currentTimeMillis();
NucleusLogger.QUERY.debug(Localiser.msg("021083", getLanguage(), toString()));
}
if (result != null) {
datastoreCompilation.setResultDefinition(new StatementResultMapping());
} else {
datastoreCompilation.setResultDefinitionForClass(new StatementClassMapping());
}
// Generate statement for candidate(s)
SelectStatement stmt = null;
try {
boolean includeSoftDeletes = getBooleanExtensionProperty(EXTENSION_INCLUDE_SOFT_DELETES, false);
boolean dontRestrictDiscrim = getBooleanExtensionProperty(EXTENSION_CANDIDATE_DONT_RESTRICT_DISCRIMINATOR, false);
Set<String> options = null;
if (includeSoftDeletes) {
options = new HashSet<>();
options.add(SelectStatementGenerator.OPTION_INCLUDE_SOFT_DELETES);
}
if (dontRestrictDiscrim) {
if (options == null) {
options = new HashSet<>();
}
options.add(SelectStatementGenerator.OPTION_DONT_RESTRICT_DISCRIM);
}
stmt = RDBMSQueryUtils.getStatementForCandidates((RDBMSStoreManager) getStoreManager(), null, candidateCmd, datastoreCompilation.getResultDefinitionForClass(), ec, candidateClass, subclasses, result, compilation.getCandidateAlias(), compilation.getCandidateAlias(), options);
} catch (NucleusException ne) {
// Statement would result in no results, so just catch it and avoid generating the statement
NucleusLogger.QUERY.warn("Query for candidates of " + candidateClass.getName() + (subclasses ? " and subclasses" : "") + " resulted in no possible candidates : " + StringUtils.getMessageFromRootCauseOfThrowable(ne));
statementReturnsEmpty = true;
return;
}
// Update the SQLStatement with filter, ordering, result etc
Set<String> options = new HashSet<>();
options.add(QueryToSQLMapper.OPTION_CASE_INSENSITIVE);
options.add(QueryToSQLMapper.OPTION_EXPLICIT_JOINS);
// Default to false for "IS NULL" with null param
if (getBooleanExtensionProperty(EXTENSION_USE_IS_NULL_WHEN_EQUALS_NULL_PARAM, false)) {
options.add(QueryToSQLMapper.OPTION_NULL_PARAM_USE_IS_NULL);
}
QueryToSQLMapper sqlMapper = new QueryToSQLMapper(stmt, compilation, parameters, datastoreCompilation.getResultDefinitionForClass(), datastoreCompilation.getResultDefinition(), candidateCmd, subclasses, getFetchPlan(), ec, null, options, extensions);
setMapperJoinTypes(sqlMapper);
sqlMapper.compile();
datastoreCompilation.setParameterNameByPosition(sqlMapper.getParameterNameByPosition());
datastoreCompilation.setPrecompilable(sqlMapper.isPrecompilable());
// Apply any range
if (range != null) {
long lower = fromInclNo;
long upper = toExclNo;
if (fromInclParam != null) {
lower = ((Number) parameters.get(fromInclParam)).longValue();
}
if (toExclParam != null) {
upper = ((Number) parameters.get(toExclParam)).longValue();
}
long count = upper - lower;
if (upper == Long.MAX_VALUE) {
count = -1;
}
stmt.setRange(lower, count);
}
// Set any extensions
boolean useUpdateLock = RDBMSQueryUtils.useUpdateLockForQuery(this);
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, Boolean.valueOf(useUpdateLock));
if (getBooleanExtensionProperty(EXTENSION_FOR_UPDATE_NOWAIT, false)) {
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE_NOWAIT, Boolean.TRUE);
}
datastoreCompilation.addStatement(stmt, stmt.getSQLText().toSQL(), false);
datastoreCompilation.setStatementParameters(stmt.getSQLText().getParametersForStatement());
if (result == null && !(resultClass != null && resultClass != candidateClass)) {
// Select of candidates, so check for any immediate multi-valued fields that are marked for fetching
// TODO If the query joins to a 1-1/N-1 and then we have a multi-valued field, we should allow that too
FetchPlanForClass fpc = getFetchPlan().getFetchPlanForClass(candidateCmd);
int[] fpMembers = fpc.getMemberNumbers();
String multifetchType = getStringExtensionProperty(RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH, null);
if ("none".equalsIgnoreCase(multifetchType)) {
} else {
// Bulk-Fetch
for (int i = 0; i < fpMembers.length; i++) {
AbstractMemberMetaData fpMmd = candidateCmd.getMetaDataForManagedMemberAtAbsolutePosition(fpMembers[i]);
if (multifetchType == null) {
// Default to bulk-fetch, so advise the user of why this is happening and how to turn it off
NucleusLogger.QUERY.debug("You have selected field " + fpMmd.getFullFieldName() + " for fetching by this query. We will fetch it using 'EXISTS'." + " To disable this set the query extension/hint '" + RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH + "' as 'none' or remove the field" + " from the query FetchPlan. If this bulk-fetch generates an invalid or unoptimised query, please report it with a way of reproducing it");
multifetchType = "exists";
}
RelationType relType = fpMmd.getRelationType(clr);
if (RelationType.isRelationMultiValued(relType)) {
if (fpMmd.hasCollection() && SCOUtils.collectionHasSerialisedElements(fpMmd)) {
// Ignore collections stored into the owner (retrieved in main query)
} else if (fpMmd.hasArray() && SCOUtils.arrayIsStoredInSingleColumn(fpMmd, ec.getMetaDataManager())) {
// Ignore arrays stored into the owner (retrieved in main query)
} else if (fpMmd.hasMap() && SCOUtils.mapHasSerialisedKeysAndValues(fpMmd)) {
// Ignore maps stored into the owner (retrieved in main query)
} else {
if ("exists".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchExistsHandler helper = new BulkFetchExistsHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else if ("join".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchJoinHandler helper = new BulkFetchJoinHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is not (bulk) fetched by this query; unsupported bulk-fetch type.");
}
}
}
}
}
}
if (NucleusLogger.QUERY.isDebugEnabled()) {
NucleusLogger.QUERY.debug(Localiser.msg("021084", getLanguage(), System.currentTimeMillis() - startTime));
}
}
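The bulk-fetch behaviour chosen above can be overridden per query through the extension named in the debug message. A hedged sketch using the JDO Query API; the literal key below is assumed to match RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH and should be verified against your DataNucleus version:

import javax.jdo.PersistenceManager;
import javax.jdo.Query;

class MultivaluedFetchHint {
    // Hypothetical: disable bulk-fetching of multi-valued FetchPlan fields for one query.
    static Object queryWithoutBulkFetch(PersistenceManager pm, String jdoql) {
        Query q = pm.newQuery(jdoql);
        q.addExtension("datanucleus.rdbms.query.multivaluedFetch", "none");
        try {
            return q.execute();
        } finally {
            q.closeAll();
        }
    }
}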