
Example 21 with QueryException

use of org.apache.geode.cache.query.QueryException in project geode by apache.

the class PRQueryProcessor method executeQueryOnBuckets.

private void executeQueryOnBuckets(Collection<Collection> resultCollector, ExecutionContext context) throws ForceReattemptException, QueryInvocationTargetException, QueryException {
    // Check if QueryMonitor is enabled, if so add query to be monitored.
    QueryMonitor queryMonitor = null;
    context.setCqQueryContext(query.isCqQuery());
    if (GemFireCacheImpl.getInstance() != null) {
        queryMonitor = GemFireCacheImpl.getInstance().getQueryMonitor();
    }
    try {
        if (queryMonitor != null) {
            // Add current thread to be monitored by QueryMonitor.
            queryMonitor.monitorQueryThread(Thread.currentThread(), query);
        }
        Object results = query.executeUsingContext(context);
        synchronized (resultCollector) {
            // TODO: In what situation would the results object itself be undefined?
            // The elements of the results can be undefined, but not the result set itself.
            this.resultType = ((SelectResults) results).getCollectionType().getElementType();
            resultCollector.add((Collection) results);
        }
        isIndexUsedForLocalQuery = ((QueryExecutionContext) context).isIndexUsed();
    } catch (BucketMovedException bme) {
        if (logger.isDebugEnabled()) {
            logger.debug("Query targeted local bucket not found. {}", bme.getMessage(), bme);
        }
        throw new ForceReattemptException("Query targeted local bucket not found. " + bme.getMessage(), bme);
    } catch (RegionDestroyedException rde) {
        throw new QueryInvocationTargetException("The Region on which query is executed may have been destroyed. " + rde.getMessage(), rde);
    } catch (QueryException qe) {
        // Check if PR is locally destroyed.
        if (pr.isLocallyDestroyed || pr.isClosed) {
            throw new ForceReattemptException("Local Partition Region or the targeted bucket has been moved");
        }
        throw qe;
    } finally {
        if (queryMonitor != null) {
            queryMonitor.stopMonitoringQueryThread(Thread.currentThread(), query);
        }
    }
}
Also used : QueryException(org.apache.geode.cache.query.QueryException) SelectResults(org.apache.geode.cache.query.SelectResults) RegionDestroyedException(org.apache.geode.cache.RegionDestroyedException) QueryMonitor(org.apache.geode.cache.query.internal.QueryMonitor) BucketMovedException(org.apache.geode.internal.cache.execute.BucketMovedException) QueryInvocationTargetException(org.apache.geode.cache.query.QueryInvocationTargetException)
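The pattern worth noting in this example is the monitor/execute/stop bracket: the current thread is registered with the QueryMonitor before executeUsingContext runs, and it is always deregistered in the finally block, even when a QueryException or ForceReattemptException escapes. A minimal sketch of that bracket, reduced to the calls shown above (query and context are assumed to be in scope; this is an illustration, not a drop-in replacement for the method):

QueryMonitor queryMonitor = null;
if (GemFireCacheImpl.getInstance() != null) {
    queryMonitor = GemFireCacheImpl.getInstance().getQueryMonitor();
}
try {
    if (queryMonitor != null) {
        // Register this thread so a long-running query can be cancelled by the monitor.
        queryMonitor.monitorQueryThread(Thread.currentThread(), query);
    }
    Object results = query.executeUsingContext(context);
    // ... collect results as in the method above ...
} finally {
    if (queryMonitor != null) {
        // Always deregister, even if the query threw.
        queryMonitor.stopMonitoringQueryThread(Thread.currentThread(), query);
    }
}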

Example 22 with QueryException

use of org.apache.geode.cache.query.QueryException in project geode by apache.

the class PRQueryProcessor method executeWithThreadPool.

private void executeWithThreadPool(Collection<Collection> resultCollector) throws QueryException, InterruptedException, ForceReattemptException {
    if (Thread.interrupted())
        throw new InterruptedException();
    java.util.List callableTasks = buildCallableTaskList(resultCollector);
    ExecutorService execService = PRQueryExecutor.getExecutorService();
    boolean reattemptNeeded = false;
    ForceReattemptException fre = null;
    if (callableTasks != null && !callableTasks.isEmpty()) {
        List futures = null;
        try {
            futures = execService.invokeAll(callableTasks, 300, TimeUnit.SECONDS);
        } catch (RejectedExecutionException rejectedExecutionEx) {
            throw rejectedExecutionEx;
        }
        if (futures != null) {
            Iterator itr = futures.iterator();
            while (itr.hasNext() && !execService.isShutdown() && !execService.isTerminated()) {
                // this._prds.partitionedRegion.checkReadiness();
                Future fut = (Future) itr.next();
                QueryTask.BucketQueryResult bqr = null;
                try {
                    bqr = (QueryTask.BucketQueryResult) fut.get(BUCKET_QUERY_TIMEOUT, TimeUnit.SECONDS);
                    // if (retry.booleanValue()) {
                    // reattemptNeeded = true;
                    // fre = (ForceReattemptException)bqr.getException();
                    // } else {
                    // Rethrows the bucket's exception, if the task recorded one.
                    bqr.handleAndThrowException();
                    // }
                    if (bqr.retry) {
                        reattemptNeeded = true;
                    }
                } catch (TimeoutException e) {
                    throw new InternalGemFireException(LocalizedStrings.PRQueryProcessor_TIMED_OUT_WHILE_EXECUTING_QUERY_TIME_EXCEEDED_0.toLocalizedString(BUCKET_QUERY_TIMEOUT), e);
                } catch (ExecutionException ee) {
                    Throwable cause = ee.getCause();
                    if (cause instanceof QueryException) {
                        throw (QueryException) cause;
                    } else {
                        throw new InternalGemFireException(LocalizedStrings.PRQueryProcessor_GOT_UNEXPECTED_EXCEPTION_WHILE_EXECUTING_QUERY_ON_PARTITIONED_REGION_BUCKET.toLocalizedString(), cause);
                    }
                }
            }
            CompiledSelect cs = this.query.getSimpleSelect();
            if (cs != null && (cs.isOrderBy() || cs.isGroupBy())) {
                ExecutionContext context = new QueryExecutionContext(this.parameters, pr.getCache());
                int limit = this.query.getLimit(parameters);
                Collection mergedResults = coalesceOrderedResults(resultCollector, context, cs, limit);
                resultCollector.clear();
                resultCollector.add(mergedResults);
            }
        }
    }
    if (execService == null || execService.isShutdown() || execService.isTerminated()) {
        this._prds.partitionedRegion.checkReadiness();
    }
    if (reattemptNeeded) {
        throw fre;
    }
}
Also used : InternalGemFireException(org.apache.geode.InternalGemFireException) QueryExecutionContext(org.apache.geode.cache.query.internal.QueryExecutionContext) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) List(java.util.List) QueryException(org.apache.geode.cache.query.QueryException) ExecutionContext(org.apache.geode.cache.query.internal.ExecutionContext) QueryExecutionContext(org.apache.geode.cache.query.internal.QueryExecutionContext) ExecutorService(java.util.concurrent.ExecutorService) Iterator(java.util.Iterator) CompiledSelect(org.apache.geode.cache.query.internal.CompiledSelect) Future(java.util.concurrent.Future) Collection(java.util.Collection) ArrayList(java.util.ArrayList) List(java.util.List) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException)
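The thread-pool handling here is standard java.util.concurrent usage: submit all bucket tasks with invokeAll and a deadline, then walk the futures and unwrap each ExecutionException so the task's own QueryException is rethrown rather than the wrapper. A stripped-down, hypothetical sketch of that unwrapping step, independent of the Geode types (the method name and generic task type are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

static <T> List<T> collectResults(ExecutorService pool, List<Callable<T>> tasks, long timeoutSeconds)
        throws Exception {
    List<T> out = new ArrayList<>();
    // invokeAll blocks until every task completes or the deadline passes; late tasks are cancelled.
    for (Future<T> future : pool.invokeAll(tasks, timeoutSeconds, TimeUnit.SECONDS)) {
        try {
            out.add(future.get());
        } catch (CancellationException e) {
            // The task missed the deadline given to invokeAll.
            throw new TimeoutException("task did not finish within " + timeoutSeconds + " seconds");
        } catch (ExecutionException e) {
            // Rethrow the task's own failure instead of the ExecutionException wrapper.
            Throwable cause = e.getCause();
            if (cause instanceof Exception) {
                throw (Exception) cause;
            }
            throw e;
        }
    }
    return out;
}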

Example 23 with QueryException

use of org.apache.geode.cache.query.QueryException in project geode by apache.

the class PRQueryDUnitHelper method getCacheSerializableRunnableForPRInvalidQuery.

/**
   * This function executes an invalid query string on the given PR region and verifies that a
   * QueryInvalidException is raised; any other QueryException fails the test.
   * 
   * @param regionName name of the partitioned region to query
   *
   * @return cacheSerializable object
   */
public CacheSerializableRunnable getCacheSerializableRunnableForPRInvalidQuery(final String regionName) {
    SerializableRunnable PrRegion = new CacheSerializableRunnable("PRQuery") {

        @Override
        public void run2() throws CacheException {
            Cache cache = getCache();
            // Querying the PR region with an Invalid query string
            String query = "INVALID QUERY";
            Region region = cache.getRegion(regionName);
            try {
                region.query(query);
                fail("PRQueryDUnitHelper#getCacheSerializableRunnableForPRInvalidQuery: InvalidQueryException expected");
            } catch (QueryInvalidException e) {
                // pass: the invalid query was rejected as expected
            } catch (QueryException qe) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().error("PRQueryDUnitHelper#getCacheSerializableRunnableForPRInvalidQuery: Caught an unexpected exception while querying: " + qe, qe);
                fail("PRQueryDUnitHelper#getCacheSerializableRunnableForPRInvalidQuery: Caught an unexpected exception while querying: " + qe);
            }
        }
    };
    return (CacheSerializableRunnable) PrRegion;
}
Also used : QueryException(org.apache.geode.cache.query.QueryException) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) QueryInvalidException(org.apache.geode.cache.query.QueryInvalidException) SerializableRunnable(org.apache.geode.test.dunit.SerializableRunnable) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) LocalRegion(org.apache.geode.internal.cache.LocalRegion) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) Region(org.apache.geode.cache.Region) Cache(org.apache.geode.cache.Cache)
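On JUnit 4.13 or later (or JUnit 5 via Assertions), the try/fail/catch idiom above can be written more compactly with assertThrows. This is a hedged alternative sketch, not the form used in the Geode test itself; getCache() and regionName are assumed to come from the same test fixture as above:

import static org.junit.Assert.assertThrows;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.QueryInvalidException;

@Test
public void invalidQueryIsRejected() throws Exception {
    Region<Object, Object> region = getCache().getRegion(regionName);  // same region as in the helper
    // The invalid query string should be rejected with QueryInvalidException.
    assertThrows(QueryInvalidException.class, () -> region.query("INVALID QUERY"));
}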

Example 24 with QueryException

use of org.apache.geode.cache.query.QueryException in project geode by apache.

the class PRQueryDUnitTest method testSimulatedDataLossBeforeQueryProcessor.

/**
   * Simulate a data loss (buckets 0 and 2) before the PRQueryEvaluator begins the query loop
   * 
   * @throws Exception
   */
@Test
public void testSimulatedDataLossBeforeQueryProcessor() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    VM accessor = host.getVM(1);
    VM datastore1 = host.getVM(2);
    VM datastore2 = host.getVM(3);
    final int totalBuckets = 11;
    CacheSerializableRunnable createPR = new CacheSerializableRunnable("Create PR") {

        @Override
        public void run2() throws CacheException {
            AttributesFactory attr = new AttributesFactory();
            attr.setValueConstraint(String.class);
            PartitionAttributes prAttr = new PartitionAttributesFactory().setRedundantCopies(1).setTotalNumBuckets(totalBuckets).create();
            attr.setPartitionAttributes(prAttr);
            getCache().createRegion(rName, attr.create());
        }
    };
    datastore1.invoke(createPR);
    datastore2.invoke(createPR);
    accessor.invoke(new CacheSerializableRunnable("Create accessor PR") {

        @Override
        public void run2() throws CacheException {
            AttributesFactory attr = new AttributesFactory();
            attr.setValueConstraint(String.class);
            PartitionAttributes prAttr = new PartitionAttributesFactory().setRedundantCopies(1).setTotalNumBuckets(totalBuckets).setLocalMaxMemory(0).create();
            attr.setPartitionAttributes(prAttr);
            getCache().createRegion(rName, attr.create());
        }
    });
    // add expected exception strings
    final IgnoredException ex = IgnoredException.addIgnoredException("Data loss detected", accessor);
    accessor.invoke(new SerializableCallable("Create bucket and test dataloss query") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            // Create bucket one
            pr.put(new Integer(1), "one");
            Object[] params = new Object[0];
            final DefaultQuery query = (DefaultQuery) getCache().getQueryService().newQuery("select distinct * from " + pr.getFullPath());
            final SelectResults results = query.getSimpleSelect().getEmptyResultSet(params, getCache(), query);
            // Fake data loss
            final HashSet<Integer> buckets = new HashSet<Integer>();
            for (int i = 0; i < 3; i++) {
                buckets.add(new Integer(i));
            }
            try {
                PartitionedRegionQueryEvaluator qe = new PartitionedRegionQueryEvaluator(pr.getSystem(), pr, query, params, results, buckets);
                qe.queryBuckets(null);
                assertTrue("Expected a QueryException because the query targeted buckets with no data", false);
            } catch (QueryException expected) {
                // expected: the simulated data loss surfaces as a QueryException
            }
            // getLogWriter().info("Select results are: " + results);
            return Boolean.TRUE;
        }
    });
    ex.remove();
}
Also used : DefaultQuery(org.apache.geode.cache.query.internal.DefaultQuery) CacheException(org.apache.geode.cache.CacheException) PartitionAttributes(org.apache.geode.cache.PartitionAttributes) Host(org.apache.geode.test.dunit.Host) PartitionedRegionQueryEvaluator(org.apache.geode.internal.cache.PartitionedRegionQueryEvaluator) CacheException(org.apache.geode.cache.CacheException) QueryException(org.apache.geode.cache.query.QueryException) IgnoredException(org.apache.geode.test.dunit.IgnoredException) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) QueryException(org.apache.geode.cache.query.QueryException) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) SelectResults(org.apache.geode.cache.query.SelectResults) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) IgnoredException(org.apache.geode.test.dunit.IgnoredException) HashSet(java.util.HashSet) Test(org.junit.Test) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest)
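One detail worth noting: ex.remove() on the last line only runs if the accessor invocation returns normally, so a failure inside the SerializableCallable would leave the ignored-exception marker registered. A slightly more defensive arrangement (a sketch using the same dunit API; the callable body is elided) brackets the invocation with try/finally:

final IgnoredException ex = IgnoredException.addIgnoredException("Data loss detected", accessor);
try {
    accessor.invoke(new SerializableCallable("Create bucket and test dataloss query") {
        public Object call() throws Exception {
            // ... same body as in the test above ...
            return Boolean.TRUE;
        }
    });
} finally {
    // Clear the expected-exception marker even if the invocation itself failed.
    ex.remove();
}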

Example 25 with QueryException

use of org.apache.geode.cache.query.QueryException in project geode by apache.

the class PRQueryDUnitHelper method getCacheSerializableRunnableForPRCountStarQueries.

public CacheSerializableRunnable getCacheSerializableRunnableForPRCountStarQueries(final String regionName, final String localRegion) {
    SerializableRunnable PrRegion = new CacheSerializableRunnable("PRCountStarQuery") {

        public void run2() throws CacheException {
            Cache cache = getCache();
            // Querying the localRegion and the PR region
            String[] queries = new String[] {
                "select COUNT(*) from /" + regionName,
                "select COUNT(*) from /" + regionName + " where ID > 0",
                "select COUNT(*) from /" + regionName + " where ID > 0 AND status='active'",
                "select COUNT(*) from /" + regionName + " where ID > 0 OR status='active'",
                "select COUNT(*) from /" + regionName + " where ID > 0 AND status LIKE 'act%'",
                "select COUNT(*) from /" + regionName + " where ID > 0 OR status LIKE 'ina%'",
                "select COUNT(*) from /" + regionName + " where ID IN SET(1, 2, 3, 4, 5)",
                "select COUNT(*) from /" + regionName + " where NOT (ID > 5)",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID > 0",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID > 0 AND status='active'",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID > 0 OR status='active'",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID > 0 AND status LIKE 'act%'",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID > 0 OR status LIKE 'ina%'",
                "select DISTINCT COUNT(*) from /" + regionName + " where ID IN SET(1, 2, 3, 4, 5)",
                "select DISTINCT COUNT(*) from /" + regionName + " where NOT (ID > 5)",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND pos.secId = 'IBM'",
                "select DISTINCT COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND pos.secId = 'IBM'",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND pos.secId = 'IBM' LIMIT 5",
                "select DISTINCT COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND pos.secId = 'IBM' ORDER BY p.ID",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND p.status = 'active' AND pos.secId = 'IBM'",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 AND p.status = 'active' OR pos.secId = 'IBM'",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 OR p.status = 'active' OR pos.secId = 'IBM'",
                "select COUNT(*) from /" + regionName + " p, p.positions.values pos where p.ID > 0 OR p.status = 'active' OR pos.secId = 'IBM' LIMIT 150" };
            Object[][] r = new Object[queries.length][2];
            Region region = cache.getRegion(regionName);
            assertNotNull(region);
            final String[] expectedExceptions = new String[] { RegionDestroyedException.class.getName(), ReplyException.class.getName(), CacheClosedException.class.getName(), ForceReattemptException.class.getName(), QueryInvocationTargetException.class.getName() };
            for (final String expectedException : expectedExceptions) {
                getCache().getLogger().info("<ExpectedException action=add>" + expectedException + "</ExpectedException>");
            }
            QueryService qs = getCache().getQueryService();
            Object[] params;
            try {
                for (int j = 0; j < queries.length; j++) {
                    String qStr = null;
                    synchronized (region) {
                        // Execute on PR region.
                        qStr = queries[j];
                        SelectResults sr = (SelectResults) qs.newQuery(qStr).execute();
                        r[j][0] = sr;
                        // Execute on local region.
                        qStr = queries[j];
                        SelectResults srr = (SelectResults) qs.newQuery(qStr.replace(regionName, localRegion)).execute();
                        r[j][1] = srr;
                    }
                }
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
                StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
                ssORrs.CompareCountStarQueryResultsWithoutAndWithIndexes(r, queries.length, true, queries);
            } catch (QueryInvocationTargetException e) {
                // unexpected here; fail the test
                throw new TestException("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception", e);
            } catch (QueryException e) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().error("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying: " + e, e);
                throw new TestException("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception", e);
            } catch (RegionDestroyedException rde) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ", rde);
            } catch (CancelException cce) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ", cce);
            } finally {
                for (final String expectedException : expectedExceptions) {
                    getCache().getLogger().info("<ExpectedException action=remove>" + expectedException + "</ExpectedException>");
                }
            }
        }
    };
    return (CacheSerializableRunnable) PrRegion;
}
Also used : StructSetOrResultsSet(org.apache.geode.cache.query.functional.StructSetOrResultsSet) TestException(util.TestException) RegionDestroyedException(org.apache.geode.cache.RegionDestroyedException) SerializableRunnable(org.apache.geode.test.dunit.SerializableRunnable) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) QueryInvocationTargetException(org.apache.geode.cache.query.QueryInvocationTargetException) QueryException(org.apache.geode.cache.query.QueryException) SelectResults(org.apache.geode.cache.query.SelectResults) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) QueryService(org.apache.geode.cache.query.QueryService) LocalRegion(org.apache.geode.internal.cache.LocalRegion) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) Region(org.apache.geode.cache.Region) CancelException(org.apache.geode.CancelException) Cache(org.apache.geode.cache.Cache)
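The comparison itself is delegated to StructSetOrResultsSet, but the core round trip is simple: compile each COUNT(*) query with the QueryService, run it against both the partitioned and the local region, and check that the two SelectResults agree. A reduced sketch using only the calls visible above (regionName and localRegion are the same parameters as in the helper; the assertion style is illustrative):

QueryService qs = cache.getQueryService();
String countQuery = "select COUNT(*) from /" + regionName + " where ID > 0";

// Execute on the partitioned region, then on the equivalent local region.
SelectResults prResult = (SelectResults) qs.newQuery(countQuery).execute();
SelectResults localResult = (SelectResults) qs.newQuery(countQuery.replace(regionName, localRegion)).execute();

// COUNT(*) yields a single numeric value in each result set; the two regions should report the same count.
assertEquals(prResult.asList(), localResult.asList());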

Aggregations

QueryException (org.apache.geode.cache.query.QueryException): 35 usages
RegionDestroyedException (org.apache.geode.cache.RegionDestroyedException): 15 usages
CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable): 15 usages
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion): 15 usages
Region (org.apache.geode.cache.Region): 14 usages
Cache (org.apache.geode.cache.Cache): 13 usages
QueryInvocationTargetException (org.apache.geode.cache.query.QueryInvocationTargetException): 13 usages
SelectResults (org.apache.geode.cache.query.SelectResults): 13 usages
QueryService (org.apache.geode.cache.query.QueryService): 12 usages
SerializableRunnable (org.apache.geode.test.dunit.SerializableRunnable): 12 usages
LocalRegion (org.apache.geode.internal.cache.LocalRegion): 11 usages
CancelException (org.apache.geode.CancelException): 10 usages
TestException (util.TestException): 10 usages
StructSetOrResultsSet (org.apache.geode.cache.query.functional.StructSetOrResultsSet): 8 usages
IndexMaintenanceException (org.apache.geode.cache.query.IndexMaintenanceException): 6 usages
HashSet (java.util.HashSet): 4 usages
Iterator (java.util.Iterator): 4 usages
CacheException (org.apache.geode.cache.CacheException): 4 usages
Function (org.apache.geode.cache.execute.Function): 4 usages
ForceReattemptException (org.apache.geode.internal.cache.ForceReattemptException): 4 usages