use of org.datanucleus.store.rdbms.scostore.IteratorStatement in project datanucleus-rdbms by datanucleus.
the class BulkFetchExistsHandler method getStatementToBulkFetchField.
/**
* Convenience method to generate a bulk-fetch statement for the specified multi-valued field of the query's candidate.
* @param candidateCmd Metadata for the candidate
* @param mmd Metadata for the multi-valued field
* @param query The query being compiled
* @param parameters Parameters for the query
* @param datastoreCompilation The datastore compilation of the query
* @param mapperOptions Any options for the query to SQL mapper
* @return The bulk-fetch statement for retrieving this multi-valued field, or null if the field's backing store is not supported
*/
public IteratorStatement getStatementToBulkFetchField(AbstractClassMetaData candidateCmd, AbstractMemberMetaData mmd, Query query, Map parameters, RDBMSQueryCompilation datastoreCompilation, Set<String> mapperOptions) {
IteratorStatement iterStmt = null;
ExecutionContext ec = query.getExecutionContext();
ClassLoaderResolver clr = ec.getClassLoaderResolver();
RDBMSStoreManager storeMgr = (RDBMSStoreManager) query.getStoreManager();
Store backingStore = storeMgr.getBackingStoreForField(clr, mmd, null);
if (backingStore instanceof JoinSetStore || backingStore instanceof JoinListStore || backingStore instanceof JoinArrayStore) {
// Set/List/array using join-table : Generate an iterator query of the form
// SELECT ELEM_TBL.COL1, ELEM_TBL.COL2, ... FROM JOIN_TBL INNER JOIN ELEM_TBL WHERE JOIN_TBL.ELEMENT_ID = ELEM_TBL.ID
// AND EXISTS (SELECT OWNER_TBL.ID FROM OWNER_TBL WHERE (queryWhereClause) AND JOIN_TBL.OWNER_ID = OWNER_TBL.ID)
if (backingStore instanceof JoinSetStore) {
iterStmt = ((JoinSetStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false);
} else if (backingStore instanceof JoinListStore) {
iterStmt = ((JoinListStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false, -1, -1);
} else if (backingStore instanceof JoinArrayStore) {
iterStmt = ((JoinArrayStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false);
} else {
throw new NucleusUserException("We do not support BulkFetch using EXISTS for backingStore = " + backingStore);
}
SelectStatement sqlStmt = iterStmt.getSelectStatement();
JoinTable joinTbl = (JoinTable) sqlStmt.getPrimaryTable().getTable();
JavaTypeMapping joinOwnerMapping = joinTbl.getOwnerMapping();
// Generate the EXISTS subquery (based on the JDOQL/JPQL query)
SelectStatement existsStmt = RDBMSQueryUtils.getStatementForCandidates(storeMgr, sqlStmt, candidateCmd, datastoreCompilation.getResultDefinitionForClass(), ec, query.getCandidateClass(), query.isSubclasses(), query.getResult(), null, null, null);
Set<String> options = new HashSet<>();
if (mapperOptions != null) {
options.addAll(mapperOptions);
}
options.add(QueryToSQLMapper.OPTION_SELECT_CANDIDATE_ID_ONLY);
QueryToSQLMapper sqlMapper = new QueryToSQLMapper(existsStmt, query.getCompilation(), parameters, null, null, candidateCmd, query.isSubclasses(), query.getFetchPlan(), ec, query.getParsedImports(), options, query.getExtensions());
sqlMapper.compile();
// Add EXISTS clause on iterator statement so we can restrict to just the owners in this query
// ORDER BY in EXISTS is forbidden by some RDBMS
existsStmt.setOrdering(null, null);
BooleanExpression existsExpr = new BooleanSubqueryExpression(sqlStmt, "EXISTS", existsStmt);
sqlStmt.whereAnd(existsExpr, true);
// Join to outer statement so we restrict to collection elements for the query candidates
SQLExpression joinTblOwnerExpr = sqlStmt.getRDBMSManager().getSQLExpressionFactory().newExpression(sqlStmt, sqlStmt.getPrimaryTable(), joinOwnerMapping);
SQLExpression existsOwnerExpr = sqlStmt.getRDBMSManager().getSQLExpressionFactory().newExpression(existsStmt, existsStmt.getPrimaryTable(), existsStmt.getPrimaryTable().getTable().getIdMapping());
existsStmt.whereAnd(joinTblOwnerExpr.eq(existsOwnerExpr), true);
// Select the owner candidate so we can separate the collection elements out to their owner
int[] ownerColIndexes = sqlStmt.select(joinTblOwnerExpr, null);
StatementMappingIndex ownerMapIdx = new StatementMappingIndex(existsStmt.getPrimaryTable().getTable().getIdMapping());
ownerMapIdx.setColumnPositions(ownerColIndexes);
iterStmt.setOwnerMapIndex(ownerMapIdx);
} else if (backingStore instanceof FKSetStore || backingStore instanceof FKListStore || backingStore instanceof FKArrayStore) {
if (backingStore instanceof FKSetStore) {
iterStmt = ((FKSetStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false);
} else if (backingStore instanceof FKListStore) {
iterStmt = ((FKListStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false, -1, -1);
} else if (backingStore instanceof FKArrayStore) {
iterStmt = ((FKArrayStore) backingStore).getIteratorStatement(ec, ec.getFetchPlan(), false);
} else {
throw new NucleusUserException("We do not support BulkFetch using EXISTS for backingStore = " + backingStore);
}
// Set/List/array using foreign-key : Generate an iterator query of the form
// SELECT ELEM_TBL.COL1, ELEM_TBL.COL2, ... FROM ELEM_TBL
// WHERE EXISTS (SELECT OWNER_TBL.ID FROM OWNER_TBL WHERE (queryWhereClause) AND ELEM_TBL.OWNER_ID = OWNER_TBL.ID)
SelectStatement sqlStmt = iterStmt.getSelectStatement();
// Generate the EXISTS subquery (based on the JDOQL/JPQL query)
SelectStatement existsStmt = RDBMSQueryUtils.getStatementForCandidates(storeMgr, sqlStmt, candidateCmd, datastoreCompilation.getResultDefinitionForClass(), ec, query.getCandidateClass(), query.isSubclasses(), query.getResult(), null, null, null);
Set<String> options = new HashSet<>();
if (mapperOptions != null) {
options.addAll(mapperOptions);
}
options.add(QueryToSQLMapper.OPTION_SELECT_CANDIDATE_ID_ONLY);
QueryToSQLMapper sqlMapper = new QueryToSQLMapper(existsStmt, query.getCompilation(), parameters, null, null, candidateCmd, query.isSubclasses(), query.getFetchPlan(), ec, query.getParsedImports(), options, query.getExtensions());
sqlMapper.compile();
// Add EXISTS clause on iterator statement so we can restrict to just the owners in this query
// ORDER BY in EXISTS is forbidden by some RDBMS
existsStmt.setOrdering(null, null);
BooleanExpression existsExpr = new BooleanSubqueryExpression(sqlStmt, "EXISTS", existsStmt);
sqlStmt.whereAnd(existsExpr, true);
// Join to outer statement so we restrict to collection elements for the query candidates
SQLExpression elemTblOwnerExpr = sqlStmt.getRDBMSManager().getSQLExpressionFactory().newExpression(sqlStmt, sqlStmt.getPrimaryTable(), ((BaseContainerStore) backingStore).getOwnerMapping());
SQLExpression existsOwnerExpr = sqlStmt.getRDBMSManager().getSQLExpressionFactory().newExpression(existsStmt, existsStmt.getPrimaryTable(), existsStmt.getPrimaryTable().getTable().getIdMapping());
existsStmt.whereAnd(elemTblOwnerExpr.eq(existsOwnerExpr), true);
// Select the owner candidate so we can separate the collection elements out to their owner
int[] ownerColIndexes = sqlStmt.select(elemTblOwnerExpr, null);
StatementMappingIndex ownerMapIdx = new StatementMappingIndex(existsStmt.getPrimaryTable().getTable().getIdMapping());
ownerMapIdx.setColumnPositions(ownerColIndexes);
iterStmt.setOwnerMapIndex(ownerMapIdx);
}
return iterStmt;
}
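For orientation, the two branches above correspond to how the multi-valued field is mapped: a join-table collection is handled by the Join*Store branch, a foreign-key collection by the FK*Store branch. A minimal, hypothetical JDO mapping sketch (class and field names are illustrative, not taken from the DataNucleus sources):
import java.util.HashSet;
import java.util.Set;
import javax.jdo.annotations.Join;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;

@PersistenceCapable
class Owner {
    // Join-table storage -> handled by the JoinSetStore branch above
    @Persistent
    @Join
    Set<Tag> tags = new HashSet<>();

    // Foreign-key storage on the element table -> handled by the FKSetStore branch above
    @Persistent(mappedBy = "owner")
    Set<Item> items = new HashSet<>();
}

@PersistenceCapable
class Tag {
    String name;
}

@PersistenceCapable
class Item {
    Owner owner;
    String description;
}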
use of org.datanucleus.store.rdbms.scostore.IteratorStatement in project datanucleus-rdbms by datanucleus.
the class JDOQLQuery method performExecute.
protected Object performExecute(Map parameters) {
if (statementReturnsEmpty) {
return Collections.EMPTY_LIST;
}
boolean inMemory = evaluateInMemory();
if (candidateCollection != null) {
// Supplied collection of instances, so evaluate in-memory
if (candidateCollection.isEmpty()) {
return Collections.EMPTY_LIST;
} else if (inMemory) {
return new JDOQLInMemoryEvaluator(this, new ArrayList(candidateCollection), compilation, parameters, clr).execute(true, true, true, true, true);
}
} else if (type == QueryType.SELECT) {
// Return cached query results if present
List<Object> cachedResults = getQueryManager().getQueryResult(this, parameters);
if (cachedResults != null) {
return new CandidateIdsQueryResult(this, cachedResults);
}
}
Object results = null;
RDBMSStoreManager storeMgr = (RDBMSStoreManager) getStoreManager();
ManagedConnection mconn = storeMgr.getConnectionManager().getConnection(ec);
try {
// Execute the query
long startTime = System.currentTimeMillis();
if (NucleusLogger.QUERY.isDebugEnabled()) {
NucleusLogger.QUERY.debug(Localiser.msg("021046", getLanguage(), getSingleStringQuery(), null));
}
AbstractClassMetaData acmd = ec.getMetaDataManager().getMetaDataForClass(candidateClass, clr);
SQLController sqlControl = storeMgr.getSQLController();
PreparedStatement ps = null;
try {
if (type == QueryType.SELECT) {
// Create PreparedStatement and apply parameters, result settings etc
ps = RDBMSQueryUtils.getPreparedStatementForQuery(mconn, datastoreCompilation.getSQL(), this);
SQLStatementHelper.applyParametersToStatement(ps, ec, datastoreCompilation.getStatementParameters(), datastoreCompilation.getParameterNameByPosition(), parameters);
RDBMSQueryUtils.prepareStatementForExecution(ps, this, true);
registerTask(ps);
ResultSet rs = null;
try {
rs = sqlControl.executeStatementQuery(ec, mconn, datastoreCompilation.getSQL(), ps);
} finally {
deregisterTask();
}
AbstractRDBMSQueryResult qr = null;
try {
if (inMemory) {
// IN-MEMORY EVALUATION
ResultObjectFactory rof = new PersistentClassROF(ec, rs, ignoreCache, getFetchPlan(), datastoreCompilation.getResultDefinitionForClass(), acmd, candidateClass);
// Just instantiate the candidates for later in-memory processing
// TODO Use a queryResult rather than an ArrayList so we load when required
List candidates = new ArrayList();
while (rs.next()) {
candidates.add(rof.getObject());
}
// Perform in-memory filter/result/order etc
results = new JDOQLInMemoryEvaluator(this, candidates, compilation, parameters, clr).execute(true, true, true, true, true);
} else {
// IN-DATASTORE EVALUATION
ResultObjectFactory rof = null;
if (result != null) {
// Each result row is of a result type
rof = new ResultClassROF(ec, rs, ignoreCache, getFetchPlan(), resultClass, datastoreCompilation.getResultDefinition());
} else if (resultClass != null && resultClass != candidateClass) {
rof = new ResultClassROF(ec, rs, resultClass, datastoreCompilation.getResultDefinitionForClass());
} else {
// Each result row is a candidate object
rof = new PersistentClassROF(ec, rs, ignoreCache, getFetchPlan(), datastoreCompilation.getResultDefinitionForClass(), acmd, candidateClass);
}
// Create the required type of QueryResult
qr = RDBMSQueryUtils.getQueryResultForQuery(this, rof, rs, getResultDistinct() ? null : candidateCollection);
// Register any bulk loaded member resultSets that need loading
Map<String, IteratorStatement> scoIterStmts = datastoreCompilation.getSCOIteratorStatements();
if (scoIterStmts != null) {
Iterator<Map.Entry<String, IteratorStatement>> scoStmtIter = scoIterStmts.entrySet().iterator();
while (scoStmtIter.hasNext()) {
Map.Entry<String, IteratorStatement> stmtIterEntry = scoStmtIter.next();
IteratorStatement iterStmt = stmtIterEntry.getValue();
String iterStmtSQL = iterStmt.getSelectStatement().getSQLText().toSQL();
NucleusLogger.DATASTORE_RETRIEVE.debug("JDOQL Bulk-Fetch of " + iterStmt.getBackingStore().getOwnerMemberMetaData().getFullFieldName());
try {
PreparedStatement psSco = sqlControl.getStatementForQuery(mconn, iterStmtSQL);
if (datastoreCompilation.getStatementParameters() != null) {
BulkFetchHandler.applyParametersToStatement(ec, psSco, datastoreCompilation, iterStmt.getSelectStatement(), parameters);
}
ResultSet rsSCO = sqlControl.executeStatementQuery(ec, mconn, iterStmtSQL, psSco);
qr.registerMemberBulkResultSet(iterStmt, rsSCO);
} catch (SQLException e) {
throw new NucleusDataStoreException(Localiser.msg("056006", iterStmtSQL), e);
}
}
}
// Initialise the QueryResult for use
qr.initialise();
// Add hooks for closing query resources
final QueryResult qr1 = qr;
final ManagedConnection mconn1 = mconn;
ManagedConnectionResourceListener listener = new ManagedConnectionResourceListener() {
public void transactionFlushed() {
}
public void transactionPreClose() {
// Tx : disconnect query from ManagedConnection (read in unread rows etc)
qr1.disconnect();
}
public void managedConnectionPreClose() {
if (!ec.getTransaction().isActive()) {
// Non-Tx : disconnect query from ManagedConnection (read in unread rows etc)
qr1.disconnect();
}
}
public void managedConnectionPostClose() {
}
public void resourcePostClose() {
mconn1.removeListener(this);
}
};
mconn.addListener(listener);
qr.addConnectionListener(listener);
results = qr;
}
} finally {
if (qr == null) {
rs.close();
}
}
} else if (type == QueryType.BULK_UPDATE || type == QueryType.BULK_DELETE) {
long bulkResult = 0;
List<StatementCompilation> stmtCompilations = datastoreCompilation.getStatementCompilations();
Iterator<StatementCompilation> stmtCompileIter = stmtCompilations.iterator();
while (stmtCompileIter.hasNext()) {
StatementCompilation stmtCompile = stmtCompileIter.next();
ps = sqlControl.getStatementForUpdate(mconn, stmtCompile.getSQL(), false);
SQLStatementHelper.applyParametersToStatement(ps, ec, datastoreCompilation.getStatementParameters(), datastoreCompilation.getParameterNameByPosition(), parameters);
RDBMSQueryUtils.prepareStatementForExecution(ps, this, false);
int[] execResults = sqlControl.executeStatementUpdate(ec, mconn, toString(), ps, true);
if (stmtCompile.useInCount()) {
bulkResult += execResults[0];
}
}
try {
// Evict all objects of this type from the cache
ec.getNucleusContext().getLevel2Cache().evictAll(candidateClass, subclasses);
} catch (UnsupportedOperationException uoe) {
// Do nothing
}
results = bulkResult;
}
} catch (SQLException sqle) {
if (storeMgr.getDatastoreAdapter().isStatementCancel(sqle)) {
throw new QueryInterruptedException("Query has been interrupted", sqle);
} else if (storeMgr.getDatastoreAdapter().isStatementTimeout(sqle)) {
throw new QueryTimeoutException("Query has been timed out", sqle);
}
throw new NucleusException(Localiser.msg("021042", datastoreCompilation.getSQL()), sqle);
}
if (NucleusLogger.QUERY.isDebugEnabled()) {
NucleusLogger.QUERY.debug(Localiser.msg("021074", getLanguage(), "" + (System.currentTimeMillis() - startTime)));
}
return results;
} finally {
mconn.release();
}
}
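As a usage illustration of the SELECT path above, a query whose fetch plan includes a multi-valued member gets its SCO iterator statements registered at compile time and executed here as bulk fetches. A minimal sketch against the standard JDO API; the persistence-unit name, the fetch group and the Owner/Item classes are the hypothetical ones sketched earlier:
import java.util.List;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import javax.jdo.Query;

public class BulkFetchUsage {
    public static void main(String[] args) {
        PersistenceManagerFactory pmf = JDOHelper.getPersistenceManagerFactory("MyUnit");
        PersistenceManager pm = pmf.getPersistenceManager();
        try {
            Query<Owner> q = pm.newQuery(Owner.class);
            // Adding the multi-valued field to the fetch plan is what makes compileQueryFull()
            // register an SCO iterator statement, which performExecute() then runs as a bulk fetch
            q.getFetchPlan().addGroup("withItems"); // hypothetical fetch group containing the "items" field
            List<Owner> owners = q.executeList();
            for (Owner owner : owners) {
                // Collection contents were bulk-loaded; no per-owner SQL is issued here
                System.out.println(owner + " has " + owner.items.size() + " items");
            }
        } finally {
            pm.close();
        }
    }
}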
use of org.datanucleus.store.rdbms.scostore.IteratorStatement in project datanucleus-rdbms by datanucleus.
the class JDOQLQuery method compileQueryFull.
/**
* Method to set the (native) query statement for the compiled query as a whole.
* @param parameters Input parameters (if known)
* @param candidateCmd Metadata for the candidate class
*/
private void compileQueryFull(Map parameters, AbstractClassMetaData candidateCmd) {
if (type != QueryType.SELECT) {
return;
}
long startTime = 0;
if (NucleusLogger.QUERY.isDebugEnabled()) {
startTime = System.currentTimeMillis();
NucleusLogger.QUERY.debug(Localiser.msg("021083", getLanguage(), toString()));
}
if (result != null) {
datastoreCompilation.setResultDefinition(new StatementResultMapping());
} else {
datastoreCompilation.setResultDefinitionForClass(new StatementClassMapping());
}
// Generate statement for candidate(s)
SelectStatement stmt = null;
try {
boolean includeSoftDeletes = getBooleanExtensionProperty(EXTENSION_INCLUDE_SOFT_DELETES, false);
boolean dontRestrictDiscrim = getBooleanExtensionProperty(EXTENSION_CANDIDATE_DONT_RESTRICT_DISCRIMINATOR, false);
Set<String> options = null;
if (includeSoftDeletes) {
options = new HashSet<>();
options.add(SelectStatementGenerator.OPTION_INCLUDE_SOFT_DELETES);
}
if (dontRestrictDiscrim) {
if (options == null) {
options = new HashSet<>();
}
options.add(SelectStatementGenerator.OPTION_DONT_RESTRICT_DISCRIM);
}
stmt = RDBMSQueryUtils.getStatementForCandidates((RDBMSStoreManager) getStoreManager(), null, candidateCmd, datastoreCompilation.getResultDefinitionForClass(), ec, candidateClass, subclasses, result, null, null, options);
} catch (NucleusException ne) {
// Statement would result in no results, so just catch it and avoid generating the statement
NucleusLogger.QUERY.warn("Query for candidates of " + candidateClass.getName() + (subclasses ? " and subclasses" : "") + " resulted in no possible candidates : " + StringUtils.getMessageFromRootCauseOfThrowable(ne));
statementReturnsEmpty = true;
return;
}
// Update the SQLStatement with filter, ordering, result etc
Set<String> options = new HashSet<>();
options.add(QueryToSQLMapper.OPTION_BULK_UPDATE_VERSION);
if (getBooleanExtensionProperty(EXTENSION_USE_IS_NULL_WHEN_EQUALS_NULL_PARAM, true)) {
options.add(QueryToSQLMapper.OPTION_NULL_PARAM_USE_IS_NULL);
}
if (getBooleanExtensionProperty(EXTENSION_NON_DISTINCT_IMPLICIT_JOIN, false)) {
options.add(QueryToSQLMapper.OPTION_NON_DISTINCT_IMPLICIT_JOINS);
}
QueryToSQLMapper sqlMapper = new QueryToSQLMapper(stmt, compilation, parameters, datastoreCompilation.getResultDefinitionForClass(), datastoreCompilation.getResultDefinition(), candidateCmd, subclasses, getFetchPlan(), ec, getParsedImports(), options, extensions);
setMapperJoinTypes(sqlMapper);
sqlMapper.compile();
datastoreCompilation.setParameterNameByPosition(sqlMapper.getParameterNameByPosition());
datastoreCompilation.setPrecompilable(sqlMapper.isPrecompilable());
if (!getResultDistinct() && stmt.isDistinct()) {
setResultDistinct(true);
compilation.setResultDistinct();
}
if (candidateCollection != null) {
// Restrict to the supplied candidate ids
BooleanExpression candidateExpr = null;
Iterator iter = candidateCollection.iterator();
JavaTypeMapping idMapping = stmt.getPrimaryTable().getTable().getIdMapping();
while (iter.hasNext()) {
Object candidate = iter.next();
SQLExpression idExpr = stmt.getSQLExpressionFactory().newExpression(stmt, stmt.getPrimaryTable(), idMapping);
SQLExpression idVal = stmt.getSQLExpressionFactory().newLiteral(stmt, idMapping, candidate);
if (candidateExpr == null) {
candidateExpr = idExpr.eq(idVal);
} else {
candidateExpr = candidateExpr.ior(idExpr.eq(idVal));
}
}
stmt.whereAnd(candidateExpr, true);
}
// Apply any range
if (range != null) {
long lower = fromInclNo;
long upper = toExclNo;
if (fromInclParam != null) {
if (parameters.containsKey(fromInclParam)) {
lower = ((Number) parameters.get(fromInclParam)).longValue();
} else {
// Must be numbered input so take penultimate
lower = ((Number) parameters.get(Integer.valueOf(parameters.size() - 2))).longValue();
}
}
if (toExclParam != null) {
if (parameters.containsKey(toExclParam)) {
upper = ((Number) parameters.get(toExclParam)).longValue();
} else {
// Must be numbered input so take ultimate
upper = ((Number) parameters.get(Integer.valueOf(parameters.size() - 1))).longValue();
}
}
stmt.setRange(lower, upper - lower);
}
// Set any extensions
boolean useUpdateLock = RDBMSQueryUtils.useUpdateLockForQuery(this);
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, Boolean.valueOf(useUpdateLock));
if (getBooleanExtensionProperty(EXTENSION_FOR_UPDATE_NOWAIT, false)) {
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE_NOWAIT, Boolean.TRUE);
}
datastoreCompilation.addStatement(stmt, stmt.getSQLText().toSQL(), false);
datastoreCompilation.setStatementParameters(stmt.getSQLText().getParametersForStatement());
if (result == null && !(resultClass != null && resultClass != candidateClass)) {
// Select of candidates, so check for any immediate multi-valued fields that are marked for fetching
// TODO If the query joins to a 1-1/N-1 and then we have a multi-valued field, we should allow that too
FetchPlanForClass fpc = getFetchPlan().getFetchPlanForClass(candidateCmd);
int[] fpMembers = fpc.getMemberNumbers();
String multifetchType = getStringExtensionProperty(RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH, null);
if ("none".equalsIgnoreCase(multifetchType)) {
// Multi-valued fetch explicitly disabled, so nothing extra to generate
} else {
// Bulk-Fetch
for (int i = 0; i < fpMembers.length; i++) {
AbstractMemberMetaData fpMmd = candidateCmd.getMetaDataForManagedMemberAtAbsolutePosition(fpMembers[i]);
if (multifetchType == null) {
// Default to bulk-fetch EXISTS, so advise the user of why this is happening and how to turn it off
NucleusLogger.QUERY.debug("You have selected field " + fpMmd.getFullFieldName() + " for fetching by this query. We will fetch it using 'EXISTS'." + " To disable this set the query extension/hint '" + RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH + "' as 'none' or remove the field" + " from the query FetchPlan. If this bulk-fetch generates an invalid or unoptimised query, please report it with a way of reproducing it");
multifetchType = "exists";
}
RelationType relType = fpMmd.getRelationType(clr);
if (RelationType.isRelationMultiValued(relType)) {
if (fpMmd.hasCollection() && SCOUtils.collectionHasSerialisedElements(fpMmd)) {
// Ignore collections stored into the owner (retrieved in main query)
} else if (fpMmd.hasArray() && SCOUtils.arrayIsStoredInSingleColumn(fpMmd, ec.getMetaDataManager())) {
// Ignore arrays stored into the owner (retrieved in main query)
} else if (fpMmd.hasMap() && SCOUtils.mapHasSerialisedKeysAndValues(fpMmd)) {
// Ignore maps stored into the owner (retrieved in main query)
} else {
if ("exists".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchExistsHandler helper = new BulkFetchExistsHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.info("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else if ("join".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchJoinHandler helper = new BulkFetchJoinHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.info("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else {
NucleusLogger.GENERAL.info("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is not (bulk) fetched by this query; unsupported bulk-fetch type.");
}
}
}
}
}
}
if (NucleusLogger.QUERY.isDebugEnabled()) {
NucleusLogger.QUERY.debug(Localiser.msg("021084", getLanguage(), System.currentTimeMillis() - startTime));
}
}
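Because the strategy above is chosen via the RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH extension, it can be overridden per query. A small sketch using the JDO Query API (the Owner class is the hypothetical one used earlier):
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import org.datanucleus.store.rdbms.RDBMSPropertyNames;

public class MultivaluedFetchExtension {
    public static void configure(PersistenceManager pm) {
        Query<Owner> q = pm.newQuery(Owner.class);
        // "none" disables bulk fetching of multi-valued fields; "exists" (the default taken above)
        // uses BulkFetchExistsHandler, and "join" uses BulkFetchJoinHandler
        q.addExtension(RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH, "none");
    }
}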
use of org.datanucleus.store.rdbms.scostore.IteratorStatement in project datanucleus-rdbms by datanucleus.
the class JPQLQuery method compileQueryFull.
/**
* Method to set the (native) query statement for the compiled query as a whole.
* The "table groups" in the resultant SQLStatement will be named as per the candidate alias,
* and thereafter "{alias}.{fieldName}".
* @param parameters Input parameters (if known)
* @param candidateCmd Metadata for the candidate class
*/
private void compileQueryFull(Map parameters, AbstractClassMetaData candidateCmd) {
if (type != QueryType.SELECT) {
return;
}
if (candidateCollection != null) {
return;
}
long startTime = 0;
if (NucleusLogger.QUERY.isDebugEnabled()) {
startTime = System.currentTimeMillis();
NucleusLogger.QUERY.debug(Localiser.msg("021083", getLanguage(), toString()));
}
if (result != null) {
datastoreCompilation.setResultDefinition(new StatementResultMapping());
} else {
datastoreCompilation.setResultDefinitionForClass(new StatementClassMapping());
}
// Generate statement for candidate(s)
SelectStatement stmt = null;
try {
boolean includeSoftDeletes = getBooleanExtensionProperty(EXTENSION_INCLUDE_SOFT_DELETES, false);
boolean dontRestrictDiscrim = getBooleanExtensionProperty(EXTENSION_CANDIDATE_DONT_RESTRICT_DISCRIMINATOR, false);
Set<String> options = null;
if (includeSoftDeletes) {
options = new HashSet<>();
options.add(SelectStatementGenerator.OPTION_INCLUDE_SOFT_DELETES);
}
if (dontRestrictDiscrim) {
if (options == null) {
options = new HashSet<>();
}
options.add(SelectStatementGenerator.OPTION_DONT_RESTRICT_DISCRIM);
}
stmt = RDBMSQueryUtils.getStatementForCandidates((RDBMSStoreManager) getStoreManager(), null, candidateCmd, datastoreCompilation.getResultDefinitionForClass(), ec, candidateClass, subclasses, result, compilation.getCandidateAlias(), compilation.getCandidateAlias(), options);
} catch (NucleusException ne) {
// Statement would result in no results, so just catch it and avoid generating the statement
NucleusLogger.QUERY.warn("Query for candidates of " + candidateClass.getName() + (subclasses ? " and subclasses" : "") + " resulted in no possible candidates : " + StringUtils.getMessageFromRootCauseOfThrowable(ne));
statementReturnsEmpty = true;
return;
}
// Update the SQLStatement with filter, ordering, result etc
Set<String> options = new HashSet<>();
options.add(QueryToSQLMapper.OPTION_CASE_INSENSITIVE);
options.add(QueryToSQLMapper.OPTION_EXPLICIT_JOINS);
// Default to false for "IS NULL" with null param
if (getBooleanExtensionProperty(EXTENSION_USE_IS_NULL_WHEN_EQUALS_NULL_PARAM, false)) {
options.add(QueryToSQLMapper.OPTION_NULL_PARAM_USE_IS_NULL);
}
QueryToSQLMapper sqlMapper = new QueryToSQLMapper(stmt, compilation, parameters, datastoreCompilation.getResultDefinitionForClass(), datastoreCompilation.getResultDefinition(), candidateCmd, subclasses, getFetchPlan(), ec, null, options, extensions);
setMapperJoinTypes(sqlMapper);
sqlMapper.compile();
datastoreCompilation.setParameterNameByPosition(sqlMapper.getParameterNameByPosition());
datastoreCompilation.setPrecompilable(sqlMapper.isPrecompilable());
// Apply any range
if (range != null) {
long lower = fromInclNo;
long upper = toExclNo;
if (fromInclParam != null) {
lower = ((Number) parameters.get(fromInclParam)).longValue();
}
if (toExclParam != null) {
upper = ((Number) parameters.get(toExclParam)).longValue();
}
long count = upper - lower;
if (upper == Long.MAX_VALUE) {
count = -1;
}
stmt.setRange(lower, count);
}
// Set any extensions
boolean useUpdateLock = RDBMSQueryUtils.useUpdateLockForQuery(this);
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, Boolean.valueOf(useUpdateLock));
if (getBooleanExtensionProperty(EXTENSION_FOR_UPDATE_NOWAIT, false)) {
stmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE_NOWAIT, Boolean.TRUE);
}
datastoreCompilation.addStatement(stmt, stmt.getSQLText().toSQL(), false);
datastoreCompilation.setStatementParameters(stmt.getSQLText().getParametersForStatement());
if (result == null && !(resultClass != null && resultClass != candidateClass)) {
// Select of candidates, so check for any immediate multi-valued fields that are marked for fetching
// TODO If the query joins to a 1-1/N-1 and then we have a multi-valued field, we should allow that too
FetchPlanForClass fpc = getFetchPlan().getFetchPlanForClass(candidateCmd);
int[] fpMembers = fpc.getMemberNumbers();
String multifetchType = getStringExtensionProperty(RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH, null);
if ("none".equalsIgnoreCase(multifetchType)) {
// Multi-valued fetch explicitly disabled, so nothing extra to generate
} else {
// Bulk-Fetch
for (int i = 0; i < fpMembers.length; i++) {
AbstractMemberMetaData fpMmd = candidateCmd.getMetaDataForManagedMemberAtAbsolutePosition(fpMembers[i]);
if (multifetchType == null) {
// Default to bulk-fetch, so advise the user of why this is happening and how to turn it off
NucleusLogger.QUERY.debug("You have selected field " + fpMmd.getFullFieldName() + " for fetching by this query. We will fetch it using 'EXISTS'." + " To disable this set the query extension/hint '" + RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH + "' as 'none' or remove the field" + " from the query FetchPlan. If this bulk-fetch generates an invalid or unoptimised query, please report it with a way of reproducing it");
multifetchType = "exists";
}
RelationType relType = fpMmd.getRelationType(clr);
if (RelationType.isRelationMultiValued(relType)) {
if (fpMmd.hasCollection() && SCOUtils.collectionHasSerialisedElements(fpMmd)) {
// Ignore collections stored into the owner (retrieved in main query)
} else if (fpMmd.hasArray() && SCOUtils.arrayIsStoredInSingleColumn(fpMmd, ec.getMetaDataManager())) {
// Ignore arrays stored into the owner (retrieved in main query)
} else if (fpMmd.hasMap() && SCOUtils.mapHasSerialisedKeysAndValues(fpMmd)) {
// Ignore maps stored into the owner (retrieved in main query)
} else {
if ("exists".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchExistsHandler helper = new BulkFetchExistsHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else if ("join".equalsIgnoreCase(multifetchType)) {
// Fetch container contents for all candidate owners
BulkFetchJoinHandler helper = new BulkFetchJoinHandler();
IteratorStatement iterStmt = helper.getStatementToBulkFetchField(candidateCmd, fpMmd, this, parameters, datastoreCompilation, options);
if (iterStmt != null) {
datastoreCompilation.setSCOIteratorStatement(fpMmd.getFullFieldName(), iterStmt);
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is currently not (bulk) fetched by this query");
}
} else {
NucleusLogger.GENERAL.debug("Query has field " + fpMmd.getFullFieldName() + " marked in the FetchPlan, yet this is not (bulk) fetched by this query; unsupported bulk-fetch type.");
}
}
}
}
}
}
if (NucleusLogger.QUERY.isDebugEnabled()) {
NucleusLogger.QUERY.debug(Localiser.msg("021084", getLanguage(), System.currentTimeMillis() - startTime));
}
}
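The JPQL path reads the same extension property, so with the JPA API it can be supplied as a query hint. A sketch assuming DataNucleus maps JPA query hints onto query extensions; the Owner entity is hypothetical:
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import org.datanucleus.store.rdbms.RDBMSPropertyNames;

public class JpqlMultivaluedFetchHint {
    public static List<Owner> fetchOwners(EntityManager em) {
        TypedQuery<Owner> q = em.createQuery("SELECT o FROM Owner o", Owner.class);
        // Hint name is the same RDBMS extension read by compileQueryFull() above
        q.setHint(RDBMSPropertyNames.PROPERTY_RDBMS_QUERY_MULTIVALUED_FETCH, "join");
        return q.getResultList();
    }
}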
use of org.datanucleus.store.rdbms.scostore.IteratorStatement in project datanucleus-rdbms by datanucleus.
the class AbstractRDBMSQueryResult method registerMemberBulkResultSet.
public void registerMemberBulkResultSet(IteratorStatement iterStmt, ResultSet rs) {
if (bulkLoadedValueByMemberNumber == null) {
bulkLoadedValueByMemberNumber = new HashMap<>();
}
try {
ExecutionContext ec = query.getExecutionContext();
AbstractMemberMetaData mmd = iterStmt.getBackingStore().getOwnerMemberMetaData();
if (mmd.hasCollection() || mmd.hasArray()) {
ElementIteratorStatement elemIterStmt = (ElementIteratorStatement) iterStmt;
ElementContainerStore backingStore = (ElementContainerStore) iterStmt.getBackingStore();
if (backingStore.isElementsAreEmbedded() || backingStore.isElementsAreSerialised()) {
int[] param = new int[backingStore.getElementMapping().getNumberOfColumnMappings()];
for (int i = 0; i < param.length; ++i) {
param[i] = i + 1;
}
if (backingStore.getElementMapping() instanceof SerialisedPCMapping || backingStore.getElementMapping() instanceof SerialisedReferenceMapping || backingStore.getElementMapping() instanceof EmbeddedElementPCMapping) {
// Element = Serialised
while (rs.next()) {
Object owner = iterStmt.getOwnerMapIndex().getMapping().getObject(ec, rs, iterStmt.getOwnerMapIndex().getColumnPositions());
Object element = backingStore.getElementMapping().getObject(ec, rs, param, ec.findStateManager(owner), backingStore.getOwnerMemberMetaData().getAbsoluteFieldNumber());
addOwnerMemberCollectionElement(mmd, owner, element);
}
} else {
// Element = Non-PC
while (rs.next()) {
Object owner = iterStmt.getOwnerMapIndex().getMapping().getObject(ec, rs, iterStmt.getOwnerMapIndex().getColumnPositions());
Object element = backingStore.getElementMapping().getObject(ec, rs, param);
addOwnerMemberCollectionElement(mmd, owner, element);
}
}
} else if (backingStore.getElementMapping() instanceof ReferenceMapping) {
// Element is Reference (interface/Object) so just use elementMapping
int[] param = new int[backingStore.getElementMapping().getNumberOfColumnMappings()];
for (int i = 0; i < param.length; ++i) {
param[i] = i + 1;
}
while (rs.next()) {
Object owner = iterStmt.getOwnerMapIndex().getMapping().getObject(ec, rs, iterStmt.getOwnerMapIndex().getColumnPositions());
Object element = backingStore.getElementMapping().getObject(ec, rs, param);
addOwnerMemberCollectionElement(mmd, owner, element);
}
} else {
String elementType = mmd.hasCollection() ? backingStore.getOwnerMemberMetaData().getCollection().getElementType() : backingStore.getOwnerMemberMetaData().getArray().getElementType();
ResultObjectFactory<E> scoROF = new PersistentClassROF(ec, rs, query.getIgnoreCache(), fp, elemIterStmt.getElementClassMapping(), backingStore.getElementClassMetaData(), ec.getClassLoaderResolver().classForName(elementType));
while (rs.next()) {
Object owner = iterStmt.getOwnerMapIndex().getMapping().getObject(ec, rs, iterStmt.getOwnerMapIndex().getColumnPositions());
Object element = scoROF.getObject();
addOwnerMemberCollectionElement(mmd, owner, element);
}
}
} else if (mmd.hasMap()) {
// TODO Cater for maps
}
} catch (SQLException sqle) {
NucleusLogger.DATASTORE.error("Exception thrown processing bulk loaded field " + iterStmt.getBackingStore().getOwnerMemberMetaData().getFullFieldName(), sqle);
} finally {
// Close the ResultSet (and its Statement)
try {
Statement stmt = null;
try {
stmt = rs.getStatement();
// Close the result set
rs.close();
} catch (SQLException e) {
NucleusLogger.DATASTORE.error(Localiser.msg("052605", e));
} finally {
try {
if (stmt != null) {
// Close the original statement
stmt.close();
}
} catch (SQLException e) {
// Do nothing
}
}
} finally {
rs = null;
}
}
}
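The addOwnerMemberCollectionElement method invoked above is not shown on this page; the following is a rough, assumed sketch of the grouping it performs (keying bulk-loaded elements by member number and owner), not the actual DataNucleus implementation:
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.datanucleus.metadata.AbstractMemberMetaData;

public class BulkLoadGroupingSketch {
    // member number -> (owner object -> elements bulk-loaded for that owner's field)
    private Map<Integer, Map<Object, List<Object>>> bulkLoadedValueByMemberNumber = new HashMap<>();

    void addOwnerMemberCollectionElement(AbstractMemberMetaData mmd, Object owner, Object element) {
        Map<Object, List<Object>> valuesByOwner =
            bulkLoadedValueByMemberNumber.computeIfAbsent(mmd.getAbsoluteFieldNumber(), k -> new HashMap<>());
        valuesByOwner.computeIfAbsent(owner, k -> new ArrayList<>()).add(element);
    }
}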