use of org.apache.geode.cache.query.types.StructType in project geode by apache.
the class QueryUtils method getConditionedRelationshipIndexResultsExpandedToTopOrCGJLevel.
/**
* This function is used to evaluate a filter evaluatable composite condition. It gets invoked
* either from a CompositeGroupJunction of "OR" type or from a where clause containing a single
* composite condition. In the latter case the boolean completeExpansion flag is always true,
* while in the former case it may be true or false. If it is false, the array of independent
* iterators passed is not null.
*
* @param data A List whose elements are two-dimensional Object arrays. Each element of the
* List represents a value which satisfies the equi-join condition. Since there may be more
* than one tuple on either side of the equality condition which meets the criteria for a
* given value, a two-dimensional Object array is required. The cartesian product of the two
* rows gives us the set of tuples satisfying the join criteria. Each element of a row of the
* Object array may be either a plain Object or a Struct object.
* @param indxInfo An array of IndexInfo objects of size 2, representing the range indexes of
* the two operands. The IndexInfo at position 0 maps to the 0th Object array row of each
* element of the List (data), and so on.
* @param context ExecutionContext object
* @param completeExpansionNeeded boolean which, if true, indicates that the CGJ needs to be
* expanded to the query from clause (top level)
* @param iterOperands This will be null, as for an OR junction we cannot have an iter operand
* @param indpdntItrs Array of independent iterators representing the various groups forming the
* composite group junction. It will be null if the complete expansion flag is true
* @return SelectResults object representing the result obtained by evaluating a filter
* evaluatable composite condition in an OR junction. The returned result is expanded either
* to the CompositeGroupJunction level or to the top level, as the case may be
*/
static SelectResults getConditionedRelationshipIndexResultsExpandedToTopOrCGJLevel(List data, IndexInfo[] indxInfo, ExecutionContext context, boolean completeExpansionNeeded, CompiledValue iterOperands, RuntimeIterator[] indpdntItrs) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
ObjectType resultType1 = indxInfo[0]._index.getResultSetType();
int indexFieldsSize1 = resultType1 instanceof StructType ? ((StructTypeImpl) resultType1).getFieldNames().length : 1;
ObjectType resultType2 = indxInfo[1]._index.getResultSetType();
int indexFieldsSize2 = resultType2 instanceof StructType ? ((StructTypeImpl) resultType2).getFieldNames().length : 1;
/*
* Even if the complete expansion is needed, pass the complete expansion flag as false. Thus for
* LHS & RHS we will get the expansionList for that individual group. The total expansion list
* will then contain the sum of the individual expansion lists plus all the iterators of the
* current scope which are dependent on any other groups or are composite iterators (i.e.
* dependent on both the independent groups currently under consideration).
*/
// pass completeExpansion as false, irrespective of actual value
IndexConditioningHelper ich1 = new IndexConditioningHelper(indxInfo[0], context, indexFieldsSize1, false, iterOperands, null);
// pass completeExpansion as false, irrespective of actual value
IndexConditioningHelper ich2 = new IndexConditioningHelper(indxInfo[1], context, indexFieldsSize2, false, iterOperands, null);
List totalExpList = new ArrayList();
totalExpList.addAll(ich1.expansionList);
totalExpList.addAll(ich2.expansionList);
List totalFinalList = null;
if (completeExpansionNeeded) {
totalFinalList = context.getCurrentIterators();
Set expnItrsAlreadyAccounted = new HashSet();
expnItrsAlreadyAccounted.addAll(ich1.finalList);
expnItrsAlreadyAccounted.addAll(ich2.finalList);
int size = totalFinalList.size();
for (int i = 0; i < size; ++i) {
RuntimeIterator currItr = (RuntimeIterator) totalFinalList.get(i);
// If a runtime iterator of the scope is not present in the check set, add it to the expansion list
if (!expnItrsAlreadyAccounted.contains(currItr)) {
totalExpList.add(currItr);
}
}
} else {
totalFinalList = new ArrayList();
for (int i = 0; i < indpdntItrs.length; ++i) {
RuntimeIterator indpndntItr = indpdntItrs[i];
if (indpndntItr == ich1.finalList.get(0)) {
totalFinalList.addAll(ich1.finalList);
} else if (indpndntItr == ich2.finalList.get(0)) {
totalFinalList.addAll(ich2.finalList);
} else {
List temp = context.getCurrScopeDpndntItrsBasedOnSingleIndpndntItr(indpndntItr);
totalFinalList.addAll(temp);
totalExpList.addAll(temp);
}
}
}
SelectResults returnSet;
StructType stype = createStructTypeForRuntimeIterators(totalFinalList);
if (totalFinalList.size() == 1) {
returnSet = QueryUtils.createResultCollection(context, new ObjectTypeImpl(stype.getClass()));
} else {
returnSet = QueryUtils.createStructCollection(context, stype);
}
RuntimeIterator[][] mappings = new RuntimeIterator[2][];
mappings[0] = ich1.indexFieldToItrsMapping;
mappings[1] = ich2.indexFieldToItrsMapping;
List[] totalCheckList = new List[] { ich1.checkList, ich2.checkList };
Iterator dataItr = data.iterator();
IndexCutDownExpansionHelper[] icdeh = new IndexCutDownExpansionHelper[] { new IndexCutDownExpansionHelper(ich1.checkList, context), new IndexCutDownExpansionHelper(ich2.checkList, context) };
ListIterator expansionListIterator = totalExpList.listIterator();
if (dataItr.hasNext()) {
QueryObserver observer = QueryObserverHolder.getInstance();
try {
observer.beforeMergeJoinOfDoubleIndexResults(ich1.indxInfo._index, ich2.indxInfo._index, data);
while (dataItr.hasNext()) {
// TODO: Change the code in range Index so that while collecting data, instead of creating a
// two-dimensional Object array, we create a one-dimensional Object array of size 2, each
// element of which stores an Object array
Object[][] values = (Object[][]) dataItr.next();
// Before doing the cartesian product of the results, we need to clear the check set of the
// IndexCutDownExpansionHelper. This is needed because for a new key the row of sets needs
// to be considered afresh, as the presence of an old row in the check set may cause us to
// wrongly skip a similar row of a set even when the row in its entirety is unique (made by
// different data in the other set)
mergeAndExpandCutDownRelationshipIndexResults(values, returnSet, mappings, expansionListIterator, totalFinalList, context, totalCheckList, iterOperands, icdeh, 0);
if (icdeh[0].cutDownNeeded)
icdeh[0].checkSet.clear();
}
} finally {
observer.afterMergeJoinOfDoubleIndexResults(returnSet);
}
}
return returnSet;
}
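The javadoc above hinges on the shape of the data argument: each element is an Object[][] of size 2, where row 0 holds the matches from one index and row 1 the matches from the other for a single join value, and their cartesian product yields the joined tuples. Below is a minimal, self-contained sketch of that expansion in plain Java; the joinOneEntry helper and the sample values are hypothetical stand-ins, not Geode code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class EquiJoinRowExpansionSketch {
    // Expands one element of the 'data' list: values[0] holds the LHS matches and values[1] the
    // RHS matches for the same join key; their cartesian product is the set of joined tuples.
    static List<Object[]> joinOneEntry(Object[][] values) {
        List<Object[]> tuples = new ArrayList<>();
        for (Object lhs : values[0]) {
            for (Object rhs : values[1]) {
                tuples.add(new Object[] { lhs, rhs });
            }
        }
        return tuples;
    }

    public static void main(String[] args) {
        // Hypothetical join key matched by two LHS rows and one RHS row.
        Object[][] values = { { "pf1", "pf3" }, { "posIBM" } };
        for (Object[] tuple : joinOneEntry(values)) {
            System.out.println(Arrays.toString(tuple)); // [pf1, posIBM] then [pf3, posIBM]
        }
    }
}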
use of org.apache.geode.cache.query.types.StructType in project geode by apache.
the class QueryUtils method getRelationshipIndexResultsMergedWithIntermediateResults.
/**
* This function is used to evaluate a filter evaluatable CompositeCondition (i.e. Range Indexes
* available on both LHS & RHS operands). This function is invoked from the AND junction
* evaluation of a CompositeGroupJunction. It expands the intermediate resultset passed to the
* level of groups determined by the LHS & RHS operands, using the range indexes. It is possible
* that the group of iterators for an operand of the condition already exists in the intermediate
* resultset passed. In such a situation, the intermediate resultset is iterated and the operand
* (whose group of iterators is available in the intermediate resultset) is evaluated. For each
* such evaluated value, the other operand's Range Index is queried and the Range Index's results
* are appropriately expanded and cut down, and a final tuple is obtained (which includes the
* previously existing fields of the intermediate resultset). The array of independent iterators
* passed from the CompositeGroupJunction will be null, except for the final condition (subject
* to the complete expansion flag being false; otherwise even for the final condition the array
* will be null), as that array is used to get the final position of iterators in the resultant
* StructBag.
*
* TODO: break this method up
*
* @param intermediateResults SelectResults object containing the intermediate resultset obtained
* by evaluation of previous filter evaluatable composite conditions of the
* CompositeGroupJunction
* @param indxInfo Array of IndexInfo objects ( size 2), representing the range index for the two
* operands of the condition
* @param context ExecutionContext object
* @param completeExpansionNeeded A boolean which, when true, indicates that the final result
* from the CompositeGroupJunction needs to be expanded to the query from clause (top) level.
* @param iterOperands CompiledValue representing the conditions which are to be iter evaluated.
* This can exist only if instead of AllGroupJunction we have a single
* CompositeGroupJunction
* @param indpdntItrs Array of RuntimeIterators representing the independent iterators of their
* representative groups forming the CompositeGroupJunction
* @return SelectResults The Result object created by evaluating the filter evaluatable condition
* merged with the intermediate results
*/
static SelectResults getRelationshipIndexResultsMergedWithIntermediateResults(SelectResults intermediateResults, IndexInfo[] indxInfo, ExecutionContext context, boolean completeExpansionNeeded, CompiledValue iterOperands, RuntimeIterator[] indpdntItrs) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
ObjectType resultType1 = indxInfo[0]._index.getResultSetType();
int indexFieldsSize1 = resultType1 instanceof StructType ? ((StructTypeImpl) resultType1).getFieldNames().length : 1;
ObjectType resultType2 = indxInfo[1]._index.getResultSetType();
int indexFieldsSize2 = resultType2 instanceof StructType ? ((StructTypeImpl) resultType2).getFieldNames().length : 1;
/*
* Even if the complete expansion is needed, pass the complete expansion flag as false. Thus
* for LHS & RHS we will get the expansionList for that individual group.
*/
// NOTE: use false for completeExpansion irrespective of actual value
IndexConditioningHelper ich1 = new IndexConditioningHelper(indxInfo[0], context, indexFieldsSize1, false, iterOperands, null);
// NOTE: use false for completeExpansion irrespective of actual value
IndexConditioningHelper ich2 = new IndexConditioningHelper(indxInfo[1], context, indexFieldsSize2, false, iterOperands, null);
// We cannot have a situation where the intermediateResultset is empty or null and the complete
// expansion flag is true, because in that case a different code path would have been invoked
// instead of this function
int noOfIndexesToUse = intermediateResults == null || intermediateResults.isEmpty() ? 2 : 0;
RuntimeIterator[] resultFieldsItrMapping = null;
List allItrs = context.getCurrentIterators();
IndexConditioningHelper singleUsableICH = null;
IndexConditioningHelper nonUsableICH = null;
List finalList = completeExpansionNeeded ? allItrs : indpdntItrs == null ? new ArrayList() : null;
// the set will contain those iterators which we don't have to expand to, either because they
// are already present (because of intermediate results) or because the index result already
// contains them
Set expnItrsToIgnore = null;
if (noOfIndexesToUse == 0) {
// If the intermediate resultset is not empty, then check whether the resultset fields of the
// intermediate resultset contain any independent iterator of the current condition
noOfIndexesToUse = 2;
StructType stype = (StructType) intermediateResults.getCollectionType().getElementType();
String[] fieldNames = stype.getFieldNames();
int len = fieldNames.length;
resultFieldsItrMapping = new RuntimeIterator[len];
String fieldName = null;
String lhsID = ich1.indpndntItr.getInternalId();
String rhsID = ich2.indpndntItr.getInternalId();
for (int i = 0; i < len; ++i) {
fieldName = fieldNames[i];
if (noOfIndexesToUse != 0) {
if (fieldName.equals(lhsID)) {
--noOfIndexesToUse;
singleUsableICH = ich2;
nonUsableICH = ich1;
} else if (fieldName.equals(rhsID)) {
--noOfIndexesToUse;
singleUsableICH = ich1;
nonUsableICH = ich2;
}
}
int pos = Integer.parseInt(fieldName.substring(4));
RuntimeIterator itrPrsntInIntermdtRes = (RuntimeIterator) allItrs.get(pos - 1);
resultFieldsItrMapping[i] = itrPrsntInIntermdtRes;
// the iterator below is already present in the resultset, so it needs to be ignored for expansion
if (completeExpansionNeeded) {
if (expnItrsToIgnore == null) {
expnItrsToIgnore = new HashSet();
}
expnItrsToIgnore.add(itrPrsntInIntermdtRes);
} else if (indpdntItrs == null) {
// We will need to know the intermediate iterators so as to know the final list which will be
// used to obtain the correct structset. But if the independent group of iterators is passed,
// the final list needs to be calculated on its basis
finalList.add(itrPrsntInIntermdtRes);
}
}
if (noOfIndexesToUse == 0) {
singleUsableICH = null;
}
}
QueryObserver observer = QueryObserverHolder.getInstance();
if (noOfIndexesToUse == 2) {
List data = null;
try {
ArrayList resultData = new ArrayList();
observer.beforeIndexLookup(indxInfo[0]._index, OQLLexerTokenTypes.TOK_EQ, null);
observer.beforeIndexLookup(indxInfo[1]._index, OQLLexerTokenTypes.TOK_EQ, null);
if (context.getBucketList() != null) {
data = queryEquijoinConditionBucketIndexes(indxInfo, context);
} else {
data = indxInfo[0]._index.queryEquijoinCondition(indxInfo[1]._index, context);
}
} finally {
observer.afterIndexLookup(data);
}
// We definitely need to evaluate both the conditions & expand each only to its own respective
// group. Ignore the boolean of reshuffling needed etc. for this case
List totalExpList = new ArrayList();
totalExpList.addAll(ich1.expansionList);
totalExpList.addAll(ich2.expansionList);
if (completeExpansionNeeded) {
if (expnItrsToIgnore == null) {
// The expnItrsToIgnore set being null at this point implies that although the complete
// expansion flag is true, the intermediate result set is empty
Support.Assert(intermediateResults == null || intermediateResults.isEmpty(), "expnItrsToIgnore should not have been null if the intermediate result set is not empty");
expnItrsToIgnore = new HashSet();
}
expnItrsToIgnore.addAll(ich1.finalList);
expnItrsToIgnore.addAll(ich2.finalList);
// identify the iterators which we need to expand to
// TODO: Make the code compact by using a common function to take care of this
int size = finalList.size();
for (int i = 0; i < size; ++i) {
RuntimeIterator currItr = (RuntimeIterator) finalList.get(i);
// If a runtime iterator of the scope is not present in the check set, add it to the expansion list
if (!expnItrsToIgnore.contains(currItr)) {
totalExpList.add(currItr);
}
}
} else {
// if the independent iterators are passed, derive the final list from them to avoid a struct
// set mismatch while doing intersection with GroupJunction results
if (indpdntItrs != null) {
finalList = getDependentItrChainForIndpndntItrs(indpdntItrs, context);
} else {
finalList.addAll(ich1.finalList);
finalList.addAll(ich2.finalList);
}
}
List[] checkList = new List[] { ich1.checkList, ich2.checkList };
StructType stype = createStructTypeForRuntimeIterators(finalList);
SelectResults returnSet = QueryUtils.createStructCollection(context, stype);
RuntimeIterator[][] mappings = new RuntimeIterator[2][];
mappings[0] = ich1.indexFieldToItrsMapping;
mappings[1] = ich2.indexFieldToItrsMapping;
List[] totalCheckList = new List[] { ich1.checkList, ich2.checkList };
RuntimeIterator[][] resultMappings = new RuntimeIterator[1][];
resultMappings[0] = resultFieldsItrMapping;
Iterator dataItr = data.iterator();
IndexCutDownExpansionHelper[] icdeh = new IndexCutDownExpansionHelper[] { new IndexCutDownExpansionHelper(ich1.checkList, context), new IndexCutDownExpansionHelper(ich2.checkList, context) };
ListIterator expansionListIterator = totalExpList.listIterator();
if (dataItr.hasNext()) {
observer = QueryObserverHolder.getInstance();
try {
observer.beforeMergeJoinOfDoubleIndexResults(indxInfo[0]._index, indxInfo[1]._index, data);
boolean doMergeWithIntermediateResults = intermediateResults != null && !intermediateResults.isEmpty();
int maxCartesianDepth = totalExpList.size() + (doMergeWithIntermediateResults ? 1 : 0);
while (dataItr.hasNext()) {
// TODO: Change the code in range Index so that while collecting data, instead of creating a
// two-dimensional Object array, we create a one-dimensional Object array of size 2, each
// element of which stores an Object array
Object[][] values = (Object[][]) dataItr.next();
// Clear the check set of the IndexCutDownExpansionHelper before the cartesian product for a
// new key, so that a unique row is not wrongly skipped because of data left over from the
// previous key (made by different data in the other set)
if (doMergeWithIntermediateResults) {
mergeRelationshipIndexResultsWithIntermediateResults(returnSet, new SelectResults[] { intermediateResults }, resultMappings, values, mappings, expansionListIterator, finalList, context, checkList, iterOperands, icdeh, 0, maxCartesianDepth);
} else {
mergeAndExpandCutDownRelationshipIndexResults(values, returnSet, mappings, expansionListIterator, finalList, context, totalCheckList, iterOperands, icdeh, 0);
}
if (icdeh[0].cutDownNeeded)
icdeh[0].checkSet.clear();
}
} finally {
observer.afterMergeJoinOfDoubleIndexResults(returnSet);
}
}
return returnSet;
} else if (noOfIndexesToUse == 1) {
// There exists one independent iterator in the current condition which is also a part of the
// intermediate resultset. Identify the final list, which will depend upon the complete
// expansion flag. Identify the iterators to be expanded to, which will also depend upon the
// complete expansion flag.
List totalExpList = new ArrayList();
totalExpList.addAll(singleUsableICH.expansionList);
if (completeExpansionNeeded) {
Support.Assert(expnItrsToIgnore != null, "expnItrsToIgnore should not be null; reaching this block itself indicates that the intermediate results were not empty");
expnItrsToIgnore.addAll(singleUsableICH.finalList);
// identify the iterators which we need to expand to
// TODO: Make the code compact by using a common function to take care of this
int size = finalList.size();
for (int i = 0; i < size; ++i) {
RuntimeIterator currItr = (RuntimeIterator) finalList.get(i);
// If a runtime iterator of the scope is not present in the check set, add it to the expansion list
if (!expnItrsToIgnore.contains(currItr)) {
totalExpList.add(currItr);
}
}
} else {
// if the independent iterators are passed, derive the final list from them to avoid a struct
// set mismatch while doing intersection with GroupJunction results
if (indpdntItrs != null) {
finalList = getDependentItrChainForIndpndntItrs(indpdntItrs, context);
} else {
finalList.addAll(singleUsableICH.finalList);
}
}
StructType stype = createStructTypeForRuntimeIterators(finalList);
SelectResults returnSet = QueryUtils.createStructCollection(context, stype);
// Obtain the empty resultset for the single usable index
IndexProtocol singleUsblIndex = singleUsableICH.indxInfo._index;
CompiledValue nonUsblIndxPath = nonUsableICH.indxInfo._path;
ObjectType singlUsblIndxResType = singleUsblIndex.getResultSetType();
SelectResults singlUsblIndxRes = null;
if (singlUsblIndxResType instanceof StructType) {
singlUsblIndxRes = QueryUtils.createStructCollection(context, (StructTypeImpl) singlUsblIndxResType);
} else {
singlUsblIndxRes = QueryUtils.createResultCollection(context, singlUsblIndxResType);
}
// iterate over the intermediate structset
Iterator intrmdtRsItr = intermediateResults.iterator();
observer = QueryObserverHolder.getInstance();
try {
observer.beforeIndexLookup(singleUsblIndex, OQLLexerTokenTypes.TOK_EQ, null);
observer.beforeIterJoinOfSingleIndexResults(singleUsblIndex, nonUsableICH.indxInfo._index);
while (intrmdtRsItr.hasNext()) {
Struct strc = (Struct) intrmdtRsItr.next();
Object[] val = strc.getFieldValues();
int len = val.length;
for (int i = 0; i < len; ++i) {
resultFieldsItrMapping[i].setCurrent(val[i]);
}
// TODO: Issue relevant index use callbacks to QueryObserver
Object key = nonUsblIndxPath.evaluate(context);
// TODO: Check this logic out
if (key != null && key.equals(QueryService.UNDEFINED)) {
continue;
}
singleUsblIndex.query(key, OQLLexerTokenTypes.TOK_EQ, singlUsblIndxRes, context);
cutDownAndExpandIndexResults(returnSet, singlUsblIndxRes, singleUsableICH.indexFieldToItrsMapping, totalExpList, finalList, context, singleUsableICH.checkList, iterOperands, singleUsableICH.indxInfo);
singlUsblIndxRes.clear();
}
} finally {
observer.afterIterJoinOfSingleIndexResults(returnSet);
observer.afterIndexLookup(returnSet);
}
return returnSet;
} else {
// PART OF ITER OPERANDS
if (logger.isDebugEnabled()) {
StringBuilder tempBuffLhs = new StringBuilder();
StringBuilder tempBuffRhs = new StringBuilder();
ich1.indxInfo._path.generateCanonicalizedExpression(tempBuffLhs, context);
ich2.indxInfo._path.generateCanonicalizedExpression(tempBuffRhs, context);
logger.debug("For better performance indexes are not used for the condition {} = {}", tempBuffLhs, tempBuffRhs);
}
CompiledValue reconstructedVal = new CompiledComparison(ich1.indxInfo._path, ich2.indxInfo._path, OQLLexerTokenTypes.TOK_EQ);
// Add this reconstructed value to the iter operand if any
CompiledValue finalVal = reconstructedVal;
if (iterOperands != null) {
// The type of CompiledJunction has to be an AND junction, as this function gets invoked only
// for AND. Also it is OK if iterOperands is itself a CompiledJunction: we can have a tree of
// CompiledJunction with its operands being a CompiledComparison & a CompiledJunction. We can
// live without creating a flat structure
finalVal = new CompiledJunction(new CompiledValue[] { iterOperands, reconstructedVal }, OQLLexerTokenTypes.LITERAL_and);
}
RuntimeIterator[][] resultMappings = new RuntimeIterator[1][];
resultMappings[0] = resultFieldsItrMapping;
return cartesian(new SelectResults[] { intermediateResults }, resultMappings, Collections.emptyList(), finalList, context, finalVal);
}
}
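A detail worth calling out in the branch above is how the intermediate struct's field names, which are the iterators' internal ids in the canonical "iterN" form, are decoded back to positions in the current scope via Integer.parseInt(fieldName.substring(4)). The standalone sketch below shows just that decoding in plain Java; the field names and the iterator descriptions are made-up stand-ins for RuntimeIterators, not Geode types.

import java.util.Arrays;
import java.util.List;

public class FieldNameToIteratorMappingSketch {
    public static void main(String[] args) {
        // Stand-in for context.getCurrentIterators(): position 0 is "iter1", position 1 is "iter2".
        List<String> scopeIterators = Arrays.asList("pf in /portfolios", "pos in pf.positions.values");
        // Field names of the intermediate StructType, in the canonical "iterN" form.
        String[] fieldNames = { "iter2", "iter1" };
        Object[] resultFieldsItrMapping = new Object[fieldNames.length];
        for (int i = 0; i < fieldNames.length; ++i) {
            int pos = Integer.parseInt(fieldNames[i].substring(4)); // "iter2" -> 2
            resultFieldsItrMapping[i] = scopeIterators.get(pos - 1); // 1-based id -> 0-based index
        }
        System.out.println(Arrays.toString(resultFieldsItrMapping));
        // [pos in pf.positions.values, pf in /portfolios]
    }
}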
use of org.apache.geode.cache.query.types.StructType in project geode by apache.
the class IndexUseMultFrmSnglCondJUnitTest method testIndexUsageComaprison.
@Test
public void testIndexUsageComaprison() throws Exception {
Region region = CacheUtils.createRegion("portfolios", Portfolio.class);
StructType resArType1 = null;
StructType resArType2 = null;
String[] strAr1 = null;
String[] strAr2 = null;
int resArSize1 = 0;
int resArSize2 = 0;
Object valPf1 = null;
Object valPos1 = null;
Object valPf2 = null;
Object valPos2 = null;
String SECID1 = null;
String SECID2 = null;
Iterator iter1 = null;
Iterator iter2 = null;
Set set1 = null;
Set set2 = null;
for (int i = 0; i < 4; i++) {
region.put("" + i, new Portfolio(i));
}
QueryService qs = CacheUtils.getQueryService();
String[] queries = { "SELECT DISTINCT * from /portfolios pf, pf.positions.values pos where pos.secId = 'IBM'" };
SelectResults[][] r = new SelectResults[queries.length][2];
for (int i = 0; i < queries.length; i++) {
Query q = null;
try {
q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][0] = (SelectResults) q.execute();
if (observer.isIndexesUsed) {
fail("If index were not there how did they get used ???? ");
}
resArType1 = (StructType) (r[i][0]).getCollectionType().getElementType();
resArSize1 = ((r[i][0]).size());
CacheUtils.log(resArType1);
strAr1 = resArType1.getFieldNames();
set1 = ((r[i][0]).asSet());
Iterator iter = set1.iterator();
while (iter.hasNext()) {
Struct stc1 = (Struct) iter.next();
valPf1 = stc1.get(strAr1[0]);
valPos1 = stc1.get(strAr1[1]);
SECID1 = (((Position) valPos1).getSecId());
}
} catch (Exception e) {
e.printStackTrace();
fail(q.getQueryString());
}
}
// Create an Index and Run the Same Query as above.
qs.createIndex("secIdIndex", IndexType.FUNCTIONAL, "b.secId", "/portfolios pf, pf.positions.values b");
for (int j = 0; j < queries.length; j++) {
Query q2 = null;
try {
q2 = CacheUtils.getQueryService().newQuery(queries[j]);
QueryObserverImpl observer2 = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer2);
r[j][1] = (SelectResults) q2.execute();
if (observer2.isIndexesUsed != true) {
fail("FAILED: Index NOT Used");
}
resArType2 = (StructType) (r[j][1]).getCollectionType().getElementType();
CacheUtils.log(resArType2);
resArSize2 = (r[j][1]).size();
strAr2 = resArType2.getFieldNames();
set2 = ((r[j][1]).asSet());
Iterator iter = set2.iterator();
while (iter.hasNext()) {
Struct stc2 = (Struct) iter.next();
valPf2 = stc2.get(strAr2[0]);
valPos2 = stc2.get(strAr2[1]);
SECID2 = (((Position) valPos2).getSecId());
}
} catch (Exception e) {
e.printStackTrace();
fail(q2.getQueryString());
}
}
if ((resArType1).equals(resArType2)) {
CacheUtils.log("Both Search Results are of the same Type i.e.--> " + resArType2);
} else {
fail("FAILED:Search result Type is different in both the cases");
}
if ((resArSize1 == resArSize2) && resArSize1 != 0) {
CacheUtils.log("Search Results Size is Non Zero and is of Same Size i.e. Size= " + resArSize1);
} else {
fail("FAILED:Search result size is different in both the cases");
}
iter2 = set2.iterator();
iter1 = set1.iterator();
while (iter1.hasNext()) {
Struct stc2 = (Struct) iter2.next();
Struct stc1 = (Struct) iter1.next();
if (stc2.get(strAr2[0]) != stc1.get(strAr1[0]))
fail("FAILED: In both the Cases the first member of StructSet i.e. Portfolio are different. ");
if (stc2.get(strAr2[1]) != stc1.get(strAr1[1]) || !((Position) stc1.get(strAr1[1])).secId.equals("IBM"))
fail("FAILED: In both the cases either Positions Or secIds obtained are different");
}
CacheUtils.compareResultsOfWithAndWithoutIndex(r, this);
}
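The test above compares the StructType element types, field names and field values of the result sets obtained with and without an index. For readers outside the Geode test harness, here is a minimal sketch of the same StructType inspection against a plain region; it assumes a locally created cache, a region named "example" and an aliased two-field projection, all illustrative choices rather than anything taken from the test.

import java.util.Arrays;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.Struct;
import org.apache.geode.cache.query.types.StructType;

public class StructTypeInspectionSketch {
    public static void main(String[] args) throws Exception {
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        Region<String, Integer> region =
            cache.<String, Integer>createRegionFactory(RegionShortcut.REPLICATE).create("example");
        region.put("a", 1);
        region.put("b", 2);
        // A projection of more than one attribute yields struct results.
        SelectResults<Struct> results = (SelectResults<Struct>) cache.getQueryService()
            .newQuery("SELECT DISTINCT e.key AS k, e.value AS v FROM /example.entrySet e")
            .execute();
        StructType structType = (StructType) results.getCollectionType().getElementType();
        System.out.println(Arrays.toString(structType.getFieldNames())); // [k, v]
        for (Struct s : results) {
            System.out.println(s.get("k") + " -> " + s.get("v"));
        }
        cache.close();
    }
}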
use of org.apache.geode.cache.query.types.StructType in project geode by apache.
the class ResultsBagLimitBehaviourJUnitTest method testAddAndGetOccurence.
// The internal method addAndGetOccurence is used so that iter evaluation adds results
// only up to the limit
@Test
public void testAddAndGetOccurence() {
ResultsBag bag = getBagObject(String.class);
bag = getBagObject(String.class);
ObjectType elementType = bag.getCollectionType().getElementType();
assertEquals(1, bag.addAndGetOccurence(elementType instanceof StructType ? ((Struct) wrap("one", elementType)).getFieldValues() : wrap("one", elementType)));
bag.add(wrap("two", elementType));
assertEquals(2, bag.addAndGetOccurence(elementType instanceof StructType ? ((Struct) wrap("two", elementType)).getFieldValues() : wrap("two", bag.getCollectionType().getElementType())));
bag.add(wrap("three", bag.getCollectionType().getElementType()));
bag.add(wrap("three", bag.getCollectionType().getElementType()));
assertEquals(3, bag.addAndGetOccurence(elementType instanceof StructType ? ((Struct) wrap("three", elementType)).getFieldValues() : wrap("three", elementType)));
bag.add(wrap(null, bag.getCollectionType().getElementType()));
bag.add(wrap(null, bag.getCollectionType().getElementType()));
assertEquals(3, bag.addAndGetOccurence(elementType instanceof StructType ? ((Struct) wrap(null, elementType)).getFieldValues() : wrap(null, elementType)));
}
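What the assertions above pin down is the bag contract: adding an element returns the number of times it now occurs in the bag. The tiny plain-Java model below sketches only that contract; it is not Geode's ResultsBag implementation.

import java.util.HashMap;
import java.util.Map;

public class OccurrenceBagSketch {
    private final Map<Object, Integer> counts = new HashMap<>();

    // Mirrors the contract exercised by addAndGetOccurence: add the element, return its new count.
    int addAndGetOccurrence(Object element) {
        return counts.merge(element, 1, Integer::sum);
    }

    public static void main(String[] args) {
        OccurrenceBagSketch bag = new OccurrenceBagSketch();
        System.out.println(bag.addAndGetOccurrence("one"));   // 1
        System.out.println(bag.addAndGetOccurrence("one"));   // 2
        System.out.println(bag.addAndGetOccurrence("three")); // 1
    }
}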
use of org.apache.geode.cache.query.types.StructType in project geode by apache.
the class PdxGroupByTestImpl method testAggregateFuncMax.
@Override
@Test
public void testAggregateFuncMax() throws Exception {
Region region = this.createRegion("portfolio", PortfolioPdx.class);
for (int i = 1; i < 200; ++i) {
PortfolioPdx pf = new PortfolioPdx(i);
pf.shortID = (short) ((short) i / 5);
region.put("key-" + i, pf);
}
String queryStr = "select p.status as status, Max(p.ID) as Maxx from /portfolio p where p.ID > 0 group by status ";
QueryService qs = CacheUtils.getQueryService();
Query query = qs.newQuery(queryStr);
CompiledSelect cs = ((DefaultQuery) query).getSelect();
SelectResults sr = (SelectResults) query.execute();
assertTrue(sr.getCollectionType().getElementType().isStructType());
assertEquals(2, sr.size());
Iterator iter = sr.iterator();
Region rgn = CacheUtils.getRegion("portfolio");
int activeMaxID = 0;
int inactiveMaxID = 0;
for (Object o : rgn.values()) {
PortfolioPdx pf = (PortfolioPdx) o;
if (pf.status.equals("active")) {
if (pf.getID() > activeMaxID) {
activeMaxID = pf.getID();
}
} else if (pf.status.equals("inactive")) {
if (pf.getID() > inactiveMaxID) {
inactiveMaxID = pf.getID();
}
} else {
fail("unexpected value of status");
}
}
while (iter.hasNext()) {
Struct struct = (Struct) iter.next();
StructType structType = struct.getStructType();
ObjectType[] fieldTypes = structType.getFieldTypes();
assertEquals("String", fieldTypes[0].getSimpleClassName());
assertEquals("Number", fieldTypes[1].getSimpleClassName());
if (struct.get("status").equals("active")) {
assertEquals(activeMaxID, ((Integer) struct.get("Maxx")).intValue());
} else if (struct.get("status").equals("inactive")) {
assertEquals(inactiveMaxID, ((Integer) struct.get("Maxx")).intValue());
} else {
fail("unexpected value of status");
}
}
ObjectType elementType = sr.getCollectionType().getElementType();
assertTrue(elementType.isStructType());
StructType structType = (StructType) elementType;
ObjectType[] fieldTypes = structType.getFieldTypes();
assertEquals("String", fieldTypes[0].getSimpleClassName());
assertEquals("Number", fieldTypes[1].getSimpleClassName());
}
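The expected maxima in the test are computed with a hand-rolled loop over the region values; the same per-status maximum can be stated more compactly, which also documents what the OQL "Max(p.ID) ... group by status" result is checked against. A plain-Java sketch follows, with hypothetical (status, ID) pairs standing in for the PortfolioPdx entries.

import java.util.LinkedHashMap;
import java.util.Map;

public class GroupByMaxSketch {
    public static void main(String[] args) {
        // Hypothetical (status, ID) pairs standing in for PortfolioPdx entries.
        Object[][] rows = { { "active", 3 }, { "inactive", 8 }, { "active", 11 }, { "inactive", 2 } };
        // Equivalent of: select p.status as status, Max(p.ID) as Maxx ... group by status
        Map<String, Integer> maxIdByStatus = new LinkedHashMap<>();
        for (Object[] row : rows) {
            maxIdByStatus.merge((String) row[0], (Integer) row[1], Math::max);
        }
        System.out.println(maxIdByStatus); // {active=11, inactive=8}
    }
}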