use of org.apache.geode.cache.EntryDestroyedException in project geode by apache.
the class CompiledSelect method applyProjectionAndAddToResultSet.
// resultSet could be a set or a bag (we have a set constructor, or there
// could be a distinct subquery).
// In the future it would be good to simplify this to always work with a bag
// (converting all sets to bags) until the end, when distinct is enforced.
// The number returned indicates the occurrence count of the data in the
// SelectResults. If the SelectResults is a ResultsSet or StructSet, 1
// indicates that the data was added and that this was its first occurrence,
// while 0 indicates that the data was not added because it was a duplicate.
// If the SelectResults is a ResultsBag or StructBag, the number indicates
// the occurrence count, so 1 means the data was added for the first time.
// For example, adding the same value twice to a ResultsSet returns 1 then 0,
// while adding it twice to a ResultsBag returns 1 then 2.
// Currently order-by is present only for StructSet and ResultsSet, which
// hold unique objects, so their occurrence count can only be 0 or 1.
private int applyProjectionAndAddToResultSet(ExecutionContext context, SelectResults resultSet, boolean ignoreOrderBy) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
List currrentRuntimeIters = context.getCurrentIterators();
int occurence = 0;
ObjectType elementType = resultSet.getCollectionType().getElementType();
boolean isStruct = elementType != null && elementType.isStructType();
// TODO: Optimize this condition in some clean way
boolean isLinkedStructure = resultSet instanceof Ordered && ((Ordered) resultSet).dataPreordered();
OrderByComparator comparator = null;
boolean applyOrderBy = false;
if (this.orderByAttrs != null && !ignoreOrderBy) {
// For a partitioned region the order-by is applied on the coordinator node
// over the cumulative results, so apply it at this level only when a limit
// is specified.
Integer limitValue = evaluateLimitValue(context, this.limit);
applyOrderBy = context.getPartitionedRegion() == null || limitValue >= 0;
comparator = (OrderByComparator) ((Ordered) resultSet).comparator();
}
if (projAttrs == null) {
int len = currrentRuntimeIters.size();
Object[] values = new Object[len];
for (int i = 0; i < len; i++) {
RuntimeIterator iter = (RuntimeIterator) currrentRuntimeIters.get(i);
values[i] = iter.evaluate(context);
// Deserialize PdxInstance values for distinct, non-remote queries when the
// cache is not configured to read PDX serialized.
if (this.distinct && !((DefaultQuery) context.getQuery()).isRemoteQuery() && !context.getCache().getPdxReadSerialized() && (values[i] instanceof PdxInstance)) {
values[i] = ((PdxInstance) values[i]).getObject();
}
}
// Order-by does not matter for count(*).
if (isCount() && !this.distinct) {
// The counter is local to this CompiledSelect and is not available in the
// ResultSet until the end of the evaluate call on this CompiledSelect
// object.
this.countStartQueryResult++;
occurence = 1;
} else {
// if order by is present
if (applyOrderBy) {
StructImpl structImpl;
if (this.distinct) {
if (isStruct) {
if (values.length == 1 && values[0] instanceof StructImpl) {
structImpl = (StructImpl) values[0];
comparator.addEvaluatedSortCriteria(structImpl.getFieldValues(), context);
occurence = resultSet.add(structImpl) ? 1 : 0;
} else {
comparator.addEvaluatedSortCriteria(values, context);
occurence = ((StructFields) resultSet).addFieldValues(values) ? 1 : 0;
}
// TODO: Instead of a normal Map which holds StructImpl objects, use a
// THashObject with an Object[] array hashing strategy, as we are
// unnecessarily creating objects of type Object[]
} else {
comparator.addEvaluatedSortCriteria(values[0], context);
occurence = resultSet.add(values[0]) ? 1 : 0;
}
} else {
if (isStruct) {
if (values.length == 1 && values[0] instanceof StructImpl) {
structImpl = (StructImpl) values[0];
comparator.addEvaluatedSortCriteria(structImpl.getFieldValues(), context);
occurence = ((Bag) resultSet).addAndGetOccurence(structImpl.getFieldValues());
} else {
comparator.addEvaluatedSortCriteria(values, context);
occurence = ((Bag) resultSet).addAndGetOccurence(values);
}
} else {
comparator.addEvaluatedSortCriteria(values[0], context);
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
} else {
if (isLinkedStructure) {
if (isStruct) {
StructImpl structImpl;
if (values.length == 1 && values[0] instanceof StructImpl) {
structImpl = (StructImpl) values[0];
} else {
structImpl = new StructImpl((StructTypeImpl) elementType, values);
}
if (this.distinct) {
occurence = resultSet.add(structImpl) ? 1 : 0;
} else {
occurence = ((Bag) resultSet).addAndGetOccurence(structImpl);
}
} else {
if (this.distinct) {
occurence = resultSet.add(values[0]) ? 1 : 0;
} else {
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
} else {
if (this.distinct) {
if (isStruct) {
occurence = ((StructFields) resultSet).addFieldValues(values) ? 1 : 0;
} else {
occurence = resultSet.add(values[0]) ? 1 : 0;
}
} else {
if (isStruct) {
occurence = ((Bag) resultSet).addAndGetOccurence(values);
} else {
boolean add = true;
if (context.isCqQueryContext()) {
if (values[0] instanceof Region.Entry) {
Region.Entry e = (Region.Entry) values[0];
if (!e.isDestroyed()) {
try {
values[0] = new CqEntry(e.getKey(), e.getValue());
} catch (EntryDestroyedException ignore) {
// Even though the isDestroyed() check is made, the entry can still
// throw EntryDestroyedException if the value becomes null.
add = false;
}
} else {
add = false;
}
}
}
if (add) {
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
}
}
}
}
} else {
// One or more projection attributes
int projCount = projAttrs.size();
Object[] values = new Object[projCount];
for (int i = 0; i < projCount; i++) {
Object[] projDef = (Object[]) projAttrs.get(i);
values[i] = ((CompiledValue) projDef[1]).evaluate(context);
// For non-remote (local) queries, deserialize PDX instances and convert
// PdxStrings as needed.
if (!((DefaultQuery) context.getQuery()).isRemoteQuery()) {
if (this.distinct && values[i] instanceof PdxInstance && !context.getCache().getPdxReadSerialized()) {
values[i] = ((PdxInstance) values[i]).getObject();
} else if (values[i] instanceof PdxString) {
values[i] = ((PdxString) values[i]).toString();
}
}
}
// if order by is present
if (applyOrderBy) {
if (distinct) {
if (isStruct) {
comparator.addEvaluatedSortCriteria(values, context);
// The occurrence count is used to determine the correct number of
// iterations required to implement the limit, based on the presence
// or absence of the distinct clause.
occurence = ((StructFields) resultSet).addFieldValues(values) ? 1 : 0;
} else {
comparator.addEvaluatedSortCriteria(values[0], context);
occurence = resultSet.add(values[0]) ? 1 : 0;
}
} else {
if (isStruct) {
comparator.addEvaluatedSortCriteria(values, context);
occurence = ((Bag) resultSet).addAndGetOccurence(values);
} else {
comparator.addEvaluatedSortCriteria(values[0], context);
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
} else {
if (isLinkedStructure) {
if (isStruct) {
StructImpl structImpl = new StructImpl((StructTypeImpl) elementType, values);
if (this.distinct) {
occurence = resultSet.add(structImpl) ? 1 : 0;
} else {
occurence = ((Bag) resultSet).addAndGetOccurence(structImpl);
}
} else {
if (this.distinct) {
occurence = resultSet.add(values[0]) ? 1 : 0;
} else {
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
} else {
if (this.distinct) {
if (isStruct) {
occurence = ((StructFields) resultSet).addFieldValues(values) ? 1 : 0;
} else {
occurence = resultSet.add(values[0]) ? 1 : 0;
}
} else {
if (isStruct) {
occurence = ((Bag) resultSet).addAndGetOccurence(values);
} else {
occurence = ((Bag) resultSet).addAndGetOccurence(values[0]);
}
}
}
}
}
return occurence;
}
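The CQ branch above guards the entry read twice: an isDestroyed() check first, then a catch of EntryDestroyedException, because the entry can still be destroyed between the check and the read. A minimal standalone sketch of that pattern, assuming only the public Region.Entry API (snapshotEntry is a hypothetical helper):
import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.Region;

// Hypothetical helper: snapshot an entry's key and value, returning null for
// entries destroyed before or during the read.
static Object[] snapshotEntry(Region.Entry<?, ?> entry) {
  if (entry.isDestroyed()) {
    return null; // already destroyed, nothing to add
  }
  try {
    // getKey()/getValue() can still throw if the entry is destroyed
    // between the isDestroyed() check and this read.
    return new Object[] {entry.getKey(), entry.getValue()};
  } catch (EntryDestroyedException ignore) {
    return null; // destroyed concurrently; treat as absent
  }
}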
use of org.apache.geode.cache.EntryDestroyedException in project geode by apache.
the class CompiledPath method evaluate.
public Object evaluate(ExecutionContext context) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
CompiledValue rcvr = getReceiver();
Object evalRcvr = rcvr.evaluate(context);
if (context.isCqQueryContext() && (evalRcvr instanceof Region.Entry || evalRcvr instanceof CqEntry)) {
try {
if (evalRcvr instanceof Region.Entry) {
Region.Entry re = (Region.Entry) evalRcvr;
if (re.isDestroyed()) {
return QueryService.UNDEFINED;
}
evalRcvr = re.getValue();
} else if (evalRcvr instanceof CqEntry) {
CqEntry re = (CqEntry) evalRcvr;
evalRcvr = re.getValue();
}
} catch (EntryDestroyedException ede) {
// Even after the isDestroyed() check, the entry can throw
// EntryDestroyedException if the value becomes null.
return QueryService.UNDEFINED;
}
}
// If the receiver is an iterator, then use the constrained type
// for attribute evaluation instead of the runtime type.
// RuntimeIterator cmpItr = null;
// if (rcvr.getType() == ID)
// {
// CompiledValue resolvedRcvr = context.resolve(((CompiledID)rcvr).getId());
// if (resolvedRcvr != null && resolvedRcvr.getType() == ITERATOR)
// cmpItr = ((RuntimeIterator)resolvedRcvr);
// }
// if (rcvr.getType() == ITERATOR)
// cmpItr = (RuntimeIterator)rcvr;
// if (cmpItr != null)
// {
// Class constraint = cmpItr.getBaseCollection().getConstraint();
// return PathUtils.evaluateAttribute(evalRcvr,
// constraint,
// getTailID());
// }
Object obj = PathUtils.evaluateAttribute(evalRcvr, getTailID());
// check for BucketRegion substitution
PartitionedRegion pr = context.getPartitionedRegion();
if (pr != null && (obj instanceof Region)) {
if (pr.getFullPath().equals(((Region) obj).getFullPath())) {
obj = context.getBucketRegion();
}
}
return obj;
}
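By convention, a destroyed entry does not fail the query here; it simply evaluates to QueryService.UNDEFINED. A minimal sketch of that mapping, assuming only the public API (valueOrUndefined is a hypothetical name):
import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.QueryService;

// Hypothetical helper: resolve an entry's value, mapping a concurrent
// destroy to UNDEFINED instead of propagating the exception.
static Object valueOrUndefined(Region.Entry<?, ?> entry) {
  try {
    return entry.isDestroyed() ? QueryService.UNDEFINED : entry.getValue();
  } catch (EntryDestroyedException ignore) {
    return QueryService.UNDEFINED;
  }
}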
use of org.apache.geode.cache.EntryDestroyedException in project geode by apache.
the class CompactRangeIndex method addToResultsFromEntries.
/**
 * @param lowerBoundKey the index key to match on for a lower bound on a ranged query; otherwise
 *        the key to match on
 * @param upperBoundKey the index key to match on for an upper bound on a ranged query; otherwise
 *        null
 * @param lowerBoundOperator the operator used to determine a match against the lower bound
 * @param upperBoundOperator the operator used to determine a match against the upper bound
 */
private void addToResultsFromEntries(Object lowerBoundKey, Object upperBoundKey, int lowerBoundOperator, int upperBoundOperator, CloseableIterator<IndexStoreEntry> entriesIter, Collection result, CompiledValue iterOps, RuntimeIterator runtimeItr, ExecutionContext context, List projAttrib, SelectResults intermediateResults, boolean isIntersection, int limit) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
QueryObserver observer = QueryObserverHolder.getInstance();
boolean limitApplied = false;
if (entriesIter == null || (limitApplied = verifyLimit(result, limit))) {
if (limitApplied) {
if (observer != null) {
observer.limitAppliedAtIndexLevel(this, limit, result);
}
}
return;
}
Set seenKey = null;
if (IndexManager.IS_TEST_EXPANSION) {
seenKey = new HashSet();
}
while (entriesIter.hasNext()) {
try {
// Check if query execution on this thread is canceled.
QueryMonitor.isQueryExecutionCanceled();
if (IndexManager.testHook != null) {
if (this.region.getCache().getLogger().fineEnabled()) {
this.region.getCache().getLogger().fine("IndexManager TestHook is set in addToResultsFromEntries.");
}
IndexManager.testHook.hook(11);
}
IndexStoreEntry indexEntry = null;
try {
indexEntry = entriesIter.next();
} catch (NoSuchElementException ignore) {
// Continue from while.
continue;
}
Object value = indexEntry.getDeserializedValue();
if (IndexManager.IS_TEST_EXPANSION) {
Object rk = indexEntry.getDeserializedRegionKey();
if (seenKey.contains(rk)) {
continue;
}
seenKey.add(rk);
List expandedResults = expandValue(context, lowerBoundKey, upperBoundKey, lowerBoundOperator, upperBoundOperator, value);
Iterator iterator = expandedResults.iterator();
while (iterator.hasNext()) {
value = iterator.next();
if (value != null) {
boolean ok = true;
if (runtimeItr != null) {
runtimeItr.setCurrent(value);
}
if (ok && runtimeItr != null && iterOps != null) {
ok = QueryUtils.applyCondition(iterOps, context);
}
if (ok) {
if (context != null && context.isCqQueryContext()) {
result.add(new CqEntry(indexEntry.getDeserializedRegionKey(), value));
} else {
applyProjection(projAttrib, context, result, value, intermediateResults, isIntersection);
}
if (verifyLimit(result, limit)) {
observer.limitAppliedAtIndexLevel(this, limit, result);
return;
}
}
}
}
} else {
if (value != null) {
boolean ok = true;
if (indexEntry.isUpdateInProgress() || TEST_ALWAYS_UPDATE_IN_PROGRESS) {
IndexInfo indexInfo = (IndexInfo) context.cacheGet(CompiledValue.INDEX_INFO);
if (runtimeItr == null) {
runtimeItr = getRuntimeIteratorForThisIndex(context, indexInfo);
if (runtimeItr == null) {
// could not match index with iterator
throw new QueryInvocationTargetException("Query alias's must be used consistently");
}
}
runtimeItr.setCurrent(value);
// Verify index key in region entry value.
ok = evaluateEntry(indexInfo, context, null);
}
if (runtimeItr != null) {
runtimeItr.setCurrent(value);
}
if (ok && runtimeItr != null && iterOps != null) {
ok = QueryUtils.applyCondition(iterOps, context);
}
if (ok) {
if (context != null && context.isCqQueryContext()) {
result.add(new CqEntry(indexEntry.getDeserializedRegionKey(), value));
} else {
if (IndexManager.testHook != null) {
IndexManager.testHook.hook(200);
}
applyProjection(projAttrib, context, result, value, intermediateResults, isIntersection);
}
if (verifyLimit(result, limit)) {
observer.limitAppliedAtIndexLevel(this, limit, result);
return;
}
}
}
}
} catch (ClassCastException | EntryDestroyedException ignore) {
// ignore it
}
}
}
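The outer try/catch wraps each loop iteration, so a ClassCastException or EntryDestroyedException discards only the current index entry rather than aborting the whole scan. A generic sketch of this per-entry fault isolation (forEachLive is a hypothetical helper):
import java.util.Iterator;
import java.util.function.Consumer;
import org.apache.geode.cache.EntryDestroyedException;

// Hypothetical helper: apply an action to each element, skipping elements
// destroyed or retyped mid-iteration instead of failing the traversal.
static <T> void forEachLive(Iterator<T> iter, Consumer<T> action) {
  while (iter.hasNext()) {
    try {
      action.accept(iter.next());
    } catch (ClassCastException | EntryDestroyedException ignore) {
      // entry changed or was destroyed concurrently; skip it
    }
  }
}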
use of org.apache.geode.cache.EntryDestroyedException in project geode by apache.
the class Coder method zRangeResponse.
public static ByteBuf zRangeResponse(ByteBufAllocator alloc, Collection<?> list, boolean withScores) {
if (list.isEmpty())
return Coder.getEmptyArrayResponse(alloc);
ByteBuf buffer = alloc.buffer();
buffer.writeByte(Coder.ARRAY_ID);
ByteBuf tmp = alloc.buffer();
int size = 0;
for (Object entry : list) {
ByteArrayWrapper key;
DoubleWrapper score;
if (entry instanceof Entry) {
try {
key = (ByteArrayWrapper) ((Entry<?, ?>) entry).getKey();
score = (DoubleWrapper) ((Entry<?, ?>) entry).getValue();
} catch (EntryDestroyedException e) {
continue;
}
} else {
Object[] fieldVals = ((Struct) entry).getFieldValues();
key = (ByteArrayWrapper) fieldVals[0];
score = (DoubleWrapper) fieldVals[1];
}
byte[] byteAr = key.toBytes();
tmp.writeByte(Coder.BULK_STRING_ID);
tmp.writeBytes(intToBytes(byteAr.length));
tmp.writeBytes(Coder.CRLFar);
tmp.writeBytes(byteAr);
tmp.writeBytes(Coder.CRLFar);
size++;
if (withScores) {
String scoreString = score.toString();
byte[] scoreAr = stringToBytes(scoreString);
tmp.writeByte(Coder.BULK_STRING_ID);
tmp.writeBytes(intToBytes(scoreString.length()));
tmp.writeBytes(Coder.CRLFar);
tmp.writeBytes(scoreAr);
tmp.writeBytes(Coder.CRLFar);
size++;
}
}
buffer.writeBytes(intToBytes(size));
buffer.writeBytes(Coder.CRLFar);
buffer.writeBytes(tmp);
tmp.release();
return buffer;
}
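The method emits a RESP array: the '*' marker and element count, then one bulk string per key (plus one per score when withScores is set), each framed as $<length>CRLF<bytes>CRLF. An Entry whose key or value read throws EntryDestroyedException is simply omitted from the array. Assuming hypothetical members foo and bar whose DoubleWrapper scores render as "1.0" and "2.0", with withScores true, the wire output would be:
*4\r\n$3\r\nfoo\r\n$3\r\n1.0\r\n$3\r\nbar\r\n$3\r\n2.0\r\n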
use of org.apache.geode.cache.EntryDestroyedException in project geode by apache.
the class LocalRegion method containsTombstone.
public boolean containsTombstone(Object key) {
checkReadiness();
checkForNoAccess();
if (!this.concurrencyChecksEnabled) {
return false;
} else {
try {
Entry entry = getDataView().getEntry(getKeyInfo(key), this, true);
return entry != null && entry.getValue() == Token.TOMBSTONE;
} catch (EntryDestroyedException ignore) {
return true;
}
}
}
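The catch clause maps EntryDestroyedException to true: an entry destroyed while being read is still reported as a tombstone. A hedged usage sketch (LocalRegion is internal API; the describeKey helper and its strings are hypothetical):
import org.apache.geode.internal.cache.LocalRegion;

// Hypothetical usage: distinguish a live entry from a tombstoned destroy and
// from plain absence, with concurrency checks enabled on the region.
static String describeKey(LocalRegion region, Object key) {
  if (region.containsKey(key)) {
    return key + " has a live value";
  } else if (region.containsTombstone(key)) {
    return key + " was destroyed; its tombstone has not been reaped";
  } else {
    return key + " is absent";
  }
}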