Use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.
Class FKMapStore, method getValue.
/**
 * Method to retrieve a value from the Map given the key.
 * @param ownerOP ObjectProvider for the owner of the map.
 * @param key The key to retrieve the value for.
 * @return The value for this key
 * @throws NoSuchElementException if the key was not found
 */
protected V getValue(ObjectProvider ownerOP, Object key) throws NoSuchElementException {
    if (!validateKeyForReading(ownerOP, key)) {
        return null;
    }

    ExecutionContext ec = ownerOP.getExecutionContext();
    if (getStmtLocked == null) {
        // Make sure this completes in case another thread needs the same info
        synchronized (this) {
            // Generate the statement, and statement mapping/parameter information
            SQLStatement sqlStmt = getSQLStatementForGet(ownerOP);
            getStmtUnlocked = sqlStmt.getSQLText().toSQL();
            sqlStmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, true);
            getStmtLocked = sqlStmt.getSQLText().toSQL();
        }
    }

    Transaction tx = ec.getTransaction();
    String stmt = (tx.getSerializeRead() != null && tx.getSerializeRead() ? getStmtLocked : getStmtUnlocked);
    Object value = null;
    try {
        ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
        SQLController sqlControl = storeMgr.getSQLController();
        try {
            // Create the statement and supply owner/key params
            PreparedStatement ps = sqlControl.getStatementForQuery(mconn, stmt);
            StatementMappingIndex ownerIdx = getMappingParams.getMappingForParameter("owner");
            int numParams = ownerIdx.getNumberOfParameterOccurrences();
            for (int paramInstance = 0; paramInstance < numParams; paramInstance++) {
                ownerIdx.getMapping().setObject(ec, ps, ownerIdx.getParameterPositionsForOccurrence(paramInstance), ownerOP.getObject());
            }
            StatementMappingIndex keyIdx = getMappingParams.getMappingForParameter("key");
            numParams = keyIdx.getNumberOfParameterOccurrences();
            for (int paramInstance = 0; paramInstance < numParams; paramInstance++) {
                keyIdx.getMapping().setObject(ec, ps, keyIdx.getParameterPositionsForOccurrence(paramInstance), key);
            }
            try {
                ResultSet rs = sqlControl.executeStatementQuery(ec, mconn, stmt, ps);
                try {
                    boolean found = rs.next();
                    if (!found) {
                        throw new NoSuchElementException();
                    }
                    if (valuesAreEmbedded || valuesAreSerialised) {
                        int[] param = new int[valueMapping.getNumberOfDatastoreMappings()];
                        for (int i = 0; i < param.length; ++i) {
                            param[i] = i + 1;
                        }
                        if (valueMapping instanceof SerialisedPCMapping || valueMapping instanceof SerialisedReferenceMapping || valueMapping instanceof EmbeddedKeyPCMapping) {
                            // Value = Serialised
                            value = valueMapping.getObject(ec, rs, param, ownerOP, ((JoinTable) mapTable).getOwnerMemberMetaData().getAbsoluteFieldNumber());
                        } else {
                            // Value = Non-PC
                            value = valueMapping.getObject(ec, rs, param);
                        }
                    } else if (valueMapping instanceof ReferenceMapping) {
                        // Value = Reference (Interface/Object)
                        int[] param = new int[valueMapping.getNumberOfDatastoreMappings()];
                        for (int i = 0; i < param.length; ++i) {
                            param[i] = i + 1;
                        }
                        value = valueMapping.getObject(ec, rs, param);
                    } else {
                        // Value = PC
                        ResultObjectFactory rof = new PersistentClassROF(ec, rs, false, getMappingDef, valueCmd, clr.classForName(valueType));
                        value = rof.getObject();
                    }
                    JDBCUtils.logWarnings(rs);
                } finally {
                    rs.close();
                }
            } finally {
                sqlControl.closeStatement(mconn, ps);
            }
        } finally {
            mconn.release();
        }
    } catch (SQLException e) {
        throw new NucleusDataStoreException(Localiser.msg("056014", stmt), e);
    }
    return (V) value;
}
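The interesting part of getValue is how it prepares its SQL only once: the first call builds both an unlocked SELECT and a FOR UPDATE variant inside a synchronized block, and every later call simply picks one of the cached strings depending on whether the transaction requires serialized reads. The sketch below shows that caching pattern in isolation; StatementCache, buildSelectSql and the serializeRead flag are made-up names for illustration and not DataNucleus API, and the sketch re-checks the field inside the synchronized block, which the original omits.

// Minimal, self-contained sketch of the lazy locked/unlocked statement cache (hypothetical names).
public class StatementCache {

    private volatile String stmtUnlocked;
    private volatile String stmtLocked;

    public String getStatement(boolean serializeRead) {
        if (stmtLocked == null) {
            // Make sure this completes in case another thread needs the same info
            synchronized (this) {
                if (stmtLocked == null) {   // re-check; the original relies on the build being idempotent
                    String base = buildSelectSql();
                    stmtUnlocked = base;
                    stmtLocked = base + " FOR UPDATE";   // locked variant for serialized reads
                }
            }
        }
        return serializeRead ? stmtLocked : stmtUnlocked;
    }

    // Stand-in for getSQLStatementForGet(...).getSQLText().toSQL()
    private String buildSelectSql() {
        return "SELECT VAL FROM MAP_TBL WHERE OWNER_ID = ? AND KEY_ID = ?";
    }
}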
Use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.
Class FKMapStore, method getSQLStatementForGet.
/**
 * Method to return an SQLStatement for retrieving the value for a key.
 * Selects the join table and optionally joins to the value table if it has its own table.
 * @param ownerOP ObjectProvider for the owning object
 * @return The SQLStatement
 */
protected SelectStatement getSQLStatementForGet(ObjectProvider ownerOP) {
    SelectStatement sqlStmt = null;
    ExecutionContext ec = ownerOP.getExecutionContext();
    final ClassLoaderResolver clr = ownerOP.getExecutionContext().getClassLoaderResolver();
    final Class valueCls = clr.classForName(this.valueType);
    if (ownerMemberMetaData.getMap().getMapType() == MapType.MAP_TYPE_KEY_IN_VALUE) {
        getMappingDef = new StatementClassMapping();
        if (valueTable.getDiscriminatorMetaData() != null && valueTable.getDiscriminatorMetaData().getStrategy() != DiscriminatorStrategy.NONE) {
            // Value class has discriminator
            if (ClassUtils.isReferenceType(valueCls)) {
                String[] clsNames = storeMgr.getNucleusContext().getMetaDataManager().getClassesImplementingInterface(valueType, clr);
                Class[] cls = new Class[clsNames.length];
                for (int i = 0; i < clsNames.length; i++) {
                    cls[i] = clr.classForName(clsNames[i]);
                }
                sqlStmt = new DiscriminatorStatementGenerator(storeMgr, clr, cls, true, null, null).getStatement(ec);
            } else {
                sqlStmt = new DiscriminatorStatementGenerator(storeMgr, clr, valueCls, true, null, null).getStatement(ec);
            }
            iterateUsingDiscriminator = true;
        } else {
            // Use union to resolve any subclasses of value
            UnionStatementGenerator stmtGen = new UnionStatementGenerator(storeMgr, clr, valueCls, true, null, null);
            stmtGen.setOption(SelectStatementGenerator.OPTION_SELECT_DN_TYPE);
            getMappingDef.setNucleusTypeColumnName(UnionStatementGenerator.DN_TYPE_COLUMN);
            sqlStmt = stmtGen.getStatement(ec);
        }
        // Select the value field(s)
        SQLStatementHelper.selectFetchPlanOfSourceClassInStatement(sqlStmt, getMappingDef, ec.getFetchPlan(), sqlStmt.getPrimaryTable(), valueCmd, ec.getFetchPlan().getMaxFetchDepth());
    } else {
        // Value is in key table
        sqlStmt = new SelectStatement(storeMgr, mapTable, null, null);
        sqlStmt.setClassLoaderResolver(clr);
        if (valueCmd != null) {
            // Left outer join to value table (so we allow for null values)
            SQLTable valueSqlTbl = sqlStmt.join(JoinType.LEFT_OUTER_JOIN, sqlStmt.getPrimaryTable(), valueMapping, valueTable, null, valueTable.getIdMapping(), null, null, true);
            // Select the value field(s)
            SQLStatementHelper.selectFetchPlanOfSourceClassInStatement(sqlStmt, getMappingDef, ec.getFetchPlan(), valueSqlTbl, valueCmd, ec.getFetchPlan().getMaxFetchDepth());
        } else {
            sqlStmt.select(sqlStmt.getPrimaryTable(), valueMapping, null);
        }
    }

    // Apply condition on owner field to filter by owner
    SQLExpressionFactory exprFactory = storeMgr.getSQLExpressionFactory();
    SQLTable ownerSqlTbl = SQLStatementHelper.getSQLTableForMappingOfTable(sqlStmt, sqlStmt.getPrimaryTable(), ownerMapping);
    SQLExpression ownerExpr = exprFactory.newExpression(sqlStmt, ownerSqlTbl, ownerMapping);
    SQLExpression ownerVal = exprFactory.newLiteralParameter(sqlStmt, ownerMapping, null, "OWNER");
    sqlStmt.whereAnd(ownerExpr.eq(ownerVal), true);
    // Apply condition on key
    if (keyMapping instanceof SerialisedMapping) {
        // If the keyMapping contains a BLOB column (or any other column not supported by the
        // database as a primary key), use LIKE instead of the OP_EQ (=) operator.
        // In future, rather than checking whether the keyMapping is an ObjectMapping, use the
        // database adapter to determine which data types are not supported as primary keys.
        SQLExpression keyExpr = exprFactory.newExpression(sqlStmt, sqlStmt.getPrimaryTable(), keyMapping);
        SQLExpression keyVal = exprFactory.newLiteralParameter(sqlStmt, keyMapping, null, "KEY");
        sqlStmt.whereAnd(new org.datanucleus.store.rdbms.sql.expression.BooleanExpression(keyExpr, Expression.OP_LIKE, keyVal), true);
    } else {
        SQLExpression keyExpr = exprFactory.newExpression(sqlStmt, sqlStmt.getPrimaryTable(), keyMapping);
        SQLExpression keyVal = exprFactory.newLiteralParameter(sqlStmt, keyMapping, null, "KEY");
        sqlStmt.whereAnd(keyExpr.eq(keyVal), true);
    }
    // Input parameter(s) - owner, key
    int inputParamNum = 1;
    StatementMappingIndex ownerIdx = new StatementMappingIndex(ownerMapping);
    StatementMappingIndex keyIdx = new StatementMappingIndex(keyMapping);
    if (sqlStmt.getNumberOfUnions() > 0) {
        // Add parameter occurrence for each union of statement
        for (int j = 0; j < sqlStmt.getNumberOfUnions() + 1; j++) {
            int[] ownerPositions = new int[ownerMapping.getNumberOfDatastoreMappings()];
            for (int k = 0; k < ownerPositions.length; k++) {
                ownerPositions[k] = inputParamNum++;
            }
            ownerIdx.addParameterOccurrence(ownerPositions);
            int[] keyPositions = new int[keyMapping.getNumberOfDatastoreMappings()];
            for (int k = 0; k < keyPositions.length; k++) {
                keyPositions[k] = inputParamNum++;
            }
            keyIdx.addParameterOccurrence(keyPositions);
        }
    } else {
        int[] ownerPositions = new int[ownerMapping.getNumberOfDatastoreMappings()];
        for (int k = 0; k < ownerPositions.length; k++) {
            ownerPositions[k] = inputParamNum++;
        }
        ownerIdx.addParameterOccurrence(ownerPositions);
        int[] keyPositions = new int[keyMapping.getNumberOfDatastoreMappings()];
        for (int k = 0; k < keyPositions.length; k++) {
            keyPositions[k] = inputParamNum++;
        }
        keyIdx.addParameterOccurrence(keyPositions);
    }
    getMappingParams = new StatementParameterMapping();
    getMappingParams.addMappingForParameter("owner", ownerIdx);
    getMappingParams.addMappingForParameter("key", keyIdx);
    return sqlStmt;
}
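A detail worth calling out: when the generated SELECT is a UNION over value subclasses, the owner and key parameters repeat once per union branch, which is why one parameter-position array is recorded per occurrence. The standalone snippet below reproduces that numbering with assumed column counts (one owner column, two key columns, two unions); the class and variable names are invented for the example and are not DataNucleus API.

// Illustrative only: computes JDBC parameter positions for owner/key params
// repeated across N union branches, mirroring the loop above.
public final class ParamPositionDemo {
    public static void main(String[] args) {
        int unions = 2;       // statement has 2 UNIONs, i.e. 3 branches (assumed)
        int ownerCols = 1;    // columns in the owner FK mapping (assumed)
        int keyCols = 2;      // columns in the key mapping (assumed)

        int paramNum = 1;
        for (int branch = 0; branch < unions + 1; branch++) {
            int[] ownerPositions = new int[ownerCols];
            for (int k = 0; k < ownerCols; k++) {
                ownerPositions[k] = paramNum++;
            }
            int[] keyPositions = new int[keyCols];
            for (int k = 0; k < keyCols; k++) {
                keyPositions[k] = paramNum++;
            }
            System.out.printf("branch %d: owner=%s key=%s%n",
                    branch, java.util.Arrays.toString(ownerPositions), java.util.Arrays.toString(keyPositions));
        }
        // Prints: branch 0: owner=[1] key=[2, 3]
        //         branch 1: owner=[4] key=[5, 6]
        //         branch 2: owner=[7] key=[8, 9]
    }
}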
Use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.
Class JoinListStore, method internalRemove.
/**
 * Removes the row for the specified element (and this owner) from the join table,
 * optionally adding the operation to a JDBC batch rather than executing it immediately.
 * @return The return code(s) from the statement execution (null if added to a batch and not yet executed)
 */
private int[] internalRemove(ObjectProvider op, ManagedConnection conn, boolean batched, Object element, boolean executeNow) throws MappedDatastoreException {
    ExecutionContext ec = op.getExecutionContext();
    SQLController sqlControl = storeMgr.getSQLController();
    String removeStmt = getRemoveStmt(element);
    try {
        PreparedStatement ps = sqlControl.getStatementForUpdate(conn, removeStmt, batched);
        try {
            int jdbcPosition = 1;
            jdbcPosition = BackingStoreHelper.populateOwnerInStatement(op, ec, ps, jdbcPosition, this);
            jdbcPosition = BackingStoreHelper.populateElementForWhereClauseInStatement(ec, ps, element, jdbcPosition, elementMapping);
            if (relationDiscriminatorMapping != null) {
                jdbcPosition = BackingStoreHelper.populateRelationDiscriminatorInStatement(ec, ps, jdbcPosition, this);
            }
            // Execute the statement
            return sqlControl.executeStatementUpdate(ec, conn, removeStmt, ps, executeNow);
        } finally {
            sqlControl.closeStatement(conn, ps);
        }
    } catch (SQLException sqle) {
        throw new MappedDatastoreException("SQLException", sqle);
    }
}
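The batched and executeNow flags allow several removals to be queued on one statement and flushed together, which the SQLController handles internally. Purely as an illustration of that idea, here is a rough plain-JDBC sketch; the helper class, table, and column names are invented and not part of DataNucleus.

import java.sql.PreparedStatement;
import java.sql.SQLException;

// Rough illustration of "queue in a batch vs execute immediately". The caller owns the
// PreparedStatement so it can stay open across several queued removals.
// Parameter positions assume a statement like
//   DELETE FROM LIST_JOIN_TBL WHERE OWNER_ID = ? AND ELEMENT_ID = ?   (invented names)
final class BatchedRemove {
    static int[] remove(PreparedStatement ps, long ownerId, long elementId,
                        boolean batched, boolean executeNow) throws SQLException {
        ps.setLong(1, ownerId);
        ps.setLong(2, elementId);
        if (batched) {
            ps.addBatch();
            // Only flush when the caller says so; otherwise keep queueing.
            return executeNow ? ps.executeBatch() : new int[0];
        }
        return new int[] { ps.executeUpdate() };
    }
}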
Use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.
Class JoinListStore, method removeAll.
/**
 * Remove all elements of a collection from the association between the owner and
 * the elements. Performs the removal in three steps. The first gets the indices
 * that will be removed (and the highest index present). The second step
 * removes these elements from the list. The third step updates the indices
 * of the remaining elements to fill the holes created.
 * @param op ObjectProvider
 * @param elements Collection of elements to remove
 * @param size Current size of the list if known. -1 if not known
 * @return Whether the database was updated
 */
public boolean removeAll(ObjectProvider op, Collection elements, int size) {
    if (elements == null || elements.size() == 0) {
        return false;
    }

    // Get the current size of the list (and hence maximum index size)
    int currentListSize = size(op);

    // Get the indices of the elements we are going to remove (highest first)
    int[] indices = getIndicesOf(op, elements);
    if (indices == null) {
        return false;
    }

    boolean modified = false;
    SQLController sqlControl = storeMgr.getSQLController();
    ExecutionContext ec = op.getExecutionContext();

    // Remove the specified elements from the join table
    String removeAllStmt = getRemoveAllStmt(elements);
    try {
        ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
        try {
            PreparedStatement ps = sqlControl.getStatementForUpdate(mconn, removeAllStmt, false);
            try {
                int jdbcPosition = 1;
                Iterator iter = elements.iterator();
                while (iter.hasNext()) {
                    Object element = iter.next();
                    jdbcPosition = BackingStoreHelper.populateOwnerInStatement(op, ec, ps, jdbcPosition, this);
                    jdbcPosition = BackingStoreHelper.populateElementForWhereClauseInStatement(ec, ps, element, jdbcPosition, elementMapping);
                    if (relationDiscriminatorMapping != null) {
                        jdbcPosition = BackingStoreHelper.populateRelationDiscriminatorInStatement(ec, ps, jdbcPosition, this);
                    }
                }
                int[] number = sqlControl.executeStatementUpdate(ec, mconn, removeAllStmt, ps, true);
                if (number[0] > 0) {
                    modified = true;
                }
            } finally {
                sqlControl.closeStatement(mconn, ps);
            }
        } finally {
            mconn.release();
        }
    } catch (SQLException e) {
        NucleusLogger.DATASTORE.error(e);
        throw new NucleusDataStoreException(Localiser.msg("056012", removeAllStmt), e);
    }

    // Shift the remaining indices to remove the holes in ordering
    try {
        boolean batched = storeMgr.allowsBatching();
        ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
        try {
            for (int i = 0; i < currentListSize; i++) {
                // Find the number of deleted indices below this index
                int shift = 0;
                boolean removed = false;
                for (int j = 0; j < indices.length; j++) {
                    if (indices[j] == i) {
                        removed = true;
                        break;
                    }
                    if (indices[j] < i) {
                        shift++;
                    }
                }
                if (!removed && shift > 0) {
                    internalShift(op, mconn, batched, i, -1 * shift, (i == currentListSize - 1));
                }
            }
        } finally {
            mconn.release();
        }
    } catch (MappedDatastoreException e) {
        NucleusLogger.DATASTORE.error(e);
        throw new NucleusDataStoreException(Localiser.msg("056012", removeAllStmt), e);
    }

    // Dependent field
    boolean dependent = getOwnerMemberMetaData().getCollection().isDependentElement();
    if (getOwnerMemberMetaData().isCascadeRemoveOrphans()) {
        dependent = true;
    }
    if (dependent) {
        // "delete-dependent" : delete elements if the collection is marked as dependent
        // TODO What if the collection contains elements that are not in the List? Should not delete them
        op.getExecutionContext().deleteObjects(elements.toArray());
    }
    return modified;
}
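The re-indexing pass after the delete is plain integer bookkeeping: for every surviving position, count how many removed indices lie below it and shift it down by that amount. The standalone snippet below reproduces the same computation in memory so it can be checked without a database; in the real store each reported move corresponds to an internalShift call that updates the index column, and the class and data here are purely illustrative.

// Illustrative re-indexing after removing elements at the given positions,
// mirroring the shift loop in removeAll above.
public final class ReindexDemo {
    public static void main(String[] args) {
        int currentListSize = 6;
        int[] removedIndices = { 1, 4 };   // positions deleted from the list

        for (int i = 0; i < currentListSize; i++) {
            // Count the removed indices below this position
            int shift = 0;
            boolean removed = false;
            for (int idx : removedIndices) {
                if (idx == i) {
                    removed = true;
                    break;
                }
                if (idx < i) {
                    shift++;
                }
            }
            if (!removed && shift > 0) {
                // In the real store this issues an UPDATE of the index column;
                // here we just report the move.
                System.out.printf("index %d -> %d%n", i, i - shift);
            }
        }
        // Prints: index 2 -> 1, index 3 -> 2, index 5 -> 3
    }
}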
Use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.
Class JoinListStore, method internalRemove.
/**
 * Convenience method to remove the specified element from the List.
 * @param ownerOP ObjectProvider of the owner
 * @param element The element
 * @param size Current size of list if known. -1 if not known
 * @return Whether the List was modified
 */
protected boolean internalRemove(ObjectProvider ownerOP, Object element, int size) {
    boolean modified = false;
    if (indexedList) {
        // Indexed List, so retrieve the index of the element and remove the object
        // Get the indices of the elements to remove in reverse order (highest first)
        // This is done because the element could be duplicated in the list.
        Collection elements = new ArrayList();
        elements.add(element);
        int[] indices = getIndicesOf(ownerOP, elements);
        if (indices == null) {
            return false;
        }
        // TODO : Change this to remove all in one go and then shift once
        for (int i = 0; i < indices.length; i++) {
            internalRemoveAt(ownerOP, indices[i], size);
            modified = true;
        }
    } else {
        // Ordered List - just remove the list item since no indexing present
        ExecutionContext ec = ownerOP.getExecutionContext();
        ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
        try {
            int[] rcs = internalRemove(ownerOP, mconn, false, element, true);
            if (rcs != null && rcs[0] > 0) {
                modified = true;
            }
        } catch (MappedDatastoreException sqe) {
            String msg = Localiser.msg("056012", sqe.getMessage());
            NucleusLogger.DATASTORE.error(msg, sqe.getCause());
            throw new NucleusDataStoreException(msg, sqe, ownerOP.getObject());
        } finally {
            mconn.release();
        }
    }
    return modified;
}
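For the indexed-list branch, the element may occur more than once, which is why all of its indices are fetched up front (highest first) before removing them one by one. The toy example below shows the same highest-first removal on an in-memory ArrayList, just to illustrate why that ordering keeps the remaining indices valid; it is not DataNucleus code, and the TODO above about doing a single delete plus one shift still applies.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Toy illustration: removing duplicate occurrences by index, highest index first,
// so earlier removals do not invalidate the remaining indices.
public final class RemoveByIndexDemo {
    public static void main(String[] args) {
        List<String> list = new ArrayList<>(Arrays.asList("a", "b", "a", "c", "a"));
        int[] indicesHighestFirst = { 4, 2, 0 };   // positions of "a", highest first

        for (int idx : indicesHighestFirst) {
            list.remove(idx);                      // safe: lower indices are untouched
        }
        System.out.println(list);                  // prints [b, c]
    }
}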