Use of org.apache.geode.internal.cache.BucketNotFoundException in the project geode by apache.
From the class AbstractPartitionedRepositoryManager, method getRepository.
/**
 * Returns the {@link IndexRepository} backing the given user bucket.
 *
 * A cached, still-open repository is returned directly; otherwise the
 * repository is (re)computed, which fails when this member is no longer
 * primary for the bucket.
 *
 * @param bucketId id of the user bucket
 * @return an open repository for the bucket, never null
 * @throws BucketNotFoundException if no repository can be computed
 */
protected IndexRepository getRepository(Integer bucketId) throws BucketNotFoundException {
  IndexRepository cached = indexRepositories.get(bucketId);
  if (cached != null && !cached.isClosed()) {
    return cached;
  }
  // Cache miss or stale entry: rebuild the repository for this bucket.
  IndexRepository rebuilt = computeRepository(bucketId);
  if (rebuilt == null) {
    throw new BucketNotFoundException(
        "Unable to find lucene index because no longer primary for bucket " + bucketId);
  }
  return rebuilt;
}
Use of org.apache.geode.internal.cache.BucketNotFoundException in the project geode by apache.
From the class AbstractPartitionedRepositoryManager, method getRepositories.
/**
 * Collects the index repositories for every bucket held locally by the
 * function context's data set.
 *
 * @param ctx region function context whose local bucket set is resolved
 * @return one repository per locally held bucket
 * @throws BucketNotFoundException if a local bucket cannot be resolved or is
 *         no longer primary on this member
 */
@Override
public Collection<IndexRepository> getRepositories(RegionFunctionContext ctx) throws BucketNotFoundException {
  Region<Object, Object> region = ctx.getDataSet();
  Set<Integer> buckets = ((InternalRegionFunctionContext) ctx).getLocalBucketSet(region);
  ArrayList<IndexRepository> repos = new ArrayList<IndexRepository>(buckets.size());
  for (Integer bucketId : buckets) {
    BucketRegion userBucket = userRegion.getDataStore().getLocalBucketById(bucketId);
    if (userBucket == null) {
      // Fix: the original message lacked a separator before "bucket id",
      // producing e.g. "...for region /rbucket id 5".
      throw new BucketNotFoundException(
          "User bucket was not found for region " + region + ", bucket id " + bucketId);
    }
    repos.add(getRepository(userBucket.getId()));
  }
  return repos;
}
Use of org.apache.geode.internal.cache.BucketNotFoundException in the project geode by apache.
From the class LuceneEventListener, method process.
/**
 * Applies a batch of async-queue events to their lucene index repositories,
 * then commits each repository that was touched.
 *
 * Recoverable conditions (bucket moved, region destroyed, primary changed,
 * cache closed, index already closed) return false so the batch is retried;
 * an IOException is treated as fatal.
 *
 * @param events the batch of events to index
 * @return true if the whole batch was applied and committed, false if it
 *         should be retried
 */
protected boolean process(final List<AsyncEvent> events) {
  // Try to get a PDX instance if possible, rather than a deserialized object
  DefaultQuery.setPdxReadSerialized(true);
  Set<IndexRepository> touched = new HashSet<IndexRepository>();
  try {
    for (AsyncEvent queueEvent : events) {
      Region sourceRegion = queueEvent.getRegion();
      Object entryKey = queueEvent.getKey();
      Object callbackArg = queueEvent.getCallbackArgument();
      IndexRepository repo = repositoryManager.getRepository(sourceRegion, entryKey, callbackArg);
      Object currentValue = getValue(sourceRegion.getEntry(entryKey));
      // A missing value means the entry was destroyed; remove it from the index.
      if (currentValue == null) {
        repo.delete(entryKey);
      } else {
        repo.update(entryKey, currentValue);
      }
      touched.add(repo);
    }
    // Commit once per repository only after the whole batch has been applied.
    for (IndexRepository repo : touched) {
      repo.commit();
    }
    return true;
  } catch (BucketNotFoundException | RegionDestroyedException | PrimaryBucketException e) {
    logger.debug("Bucket not found while saving to lucene index: " + e.getMessage(), e);
    return false;
  } catch (CacheClosedException e) {
    logger.debug("Unable to save to lucene index, cache has been closed", e);
    return false;
  } catch (AlreadyClosedException e) {
    logger.debug("Unable to commit, the lucene index is already closed", e);
    return false;
  } catch (IOException e) {
    throw new InternalGemFireError("Unable to save to lucene index", e);
  } finally {
    // Always restore the PDX read mode, even on failure.
    DefaultQuery.setPdxReadSerialized(false);
  }
}
Use of org.apache.geode.internal.cache.BucketNotFoundException in the project geode by apache.
From the class LuceneEventListenerJUnitTest, method shouldHandleBucketNotFoundExceptionWithoutLoggingError.
/**
 * A BucketNotFoundException raised while looking up a repository must make
 * processEvents report failure (so the batch is retried) without logging at
 * error level.
 */
@Test
public void shouldHandleBucketNotFoundExceptionWithoutLoggingError() throws BucketNotFoundException {
  RepositoryManager repoManager = Mockito.mock(RepositoryManager.class);
  Logger mockLogger = Mockito.mock(Logger.class);
  Mockito.when(repoManager.getRepository(any(), any(), any()))
      .thenThrow(BucketNotFoundException.class);

  LuceneEventListener listener = new LuceneEventListener(repoManager);
  listener.logger = mockLogger;

  AsyncEvent mockEvent = Mockito.mock(AsyncEvent.class);
  boolean handled = listener.processEvents(Arrays.asList(new AsyncEvent[] { mockEvent }));

  assertFalse(handled);
  verify(mockLogger, never()).error(anyString(), any(Exception.class));
}
Use of org.apache.geode.internal.cache.BucketNotFoundException in the project geode by apache.
From the class LuceneQueryFunction, method execute.
/**
 * Executes a lucene search over the buckets held locally by this member.
 *
 * Resolves the target index from the function arguments, runs the query
 * against every local index repository, merges the per-repository collectors,
 * and sends the merged result. Bucket-movement and cache-lifecycle failures
 * are wrapped in InternalFunctionInvocationTargetException so the caller can
 * retry the function.
 *
 * @param context must be a RegionFunctionContext carrying a
 *        LuceneFunctionContext as its arguments
 * @throws IllegalArgumentException if the search context or query provider is missing
 * @throws LuceneIndexNotFoundException if the named index does not exist on the region
 */
@Override
public void execute(FunctionContext context) {
  RegionFunctionContext ctx = (RegionFunctionContext) context;
  ResultSender<TopEntriesCollector> resultSender = ctx.getResultSender();
  Region region = ctx.getDataSet();

  LuceneFunctionContext<IndexResultCollector> searchContext =
      (LuceneFunctionContext) ctx.getArguments();
  if (searchContext == null) {
    throw new IllegalArgumentException("Missing search context");
  }
  LuceneQueryProvider queryProvider = searchContext.getQueryProvider();
  if (queryProvider == null) {
    throw new IllegalArgumentException("Missing query provider");
  }

  LuceneIndexImpl index = getLuceneIndex(region, searchContext);
  if (index == null) {
    throw new LuceneIndexNotFoundException(searchContext.getIndexName(), region.getFullPath());
  }
  RepositoryManager repoManager = index.getRepositoryManager();
  LuceneIndexStats stats = index.getIndexStats();

  Query query = getQuery(queryProvider, index);
  if (logger.isDebugEnabled()) {
    logger.debug("Executing lucene query: {}, on region {}", query, region.getFullPath());
  }
  int resultLimit = searchContext.getLimit();

  // Fix: searchContext is known non-null here (checked above), so the
  // original "(searchContext == null) ? null : ..." ternary was dead code.
  CollectorManager manager = searchContext.getCollectorManager();
  if (manager == null) {
    manager = new TopEntriesCollectorManager(null, resultLimit);
  }

  Collection<IndexResultCollector> results = new ArrayList<>();
  TopEntriesCollector mergedResult = null;
  try {
    long start = stats.startQuery();
    try {
      Collection<IndexRepository> repositories = repoManager.getRepositories(ctx);
      for (IndexRepository repo : repositories) {
        IndexResultCollector collector = manager.newCollector(repo.toString());
        if (logger.isDebugEnabled()) {
          // Parameterized form, consistent with the debug log above.
          logger.debug("Executing search on repo: {}", repo);
        }
        repo.query(query, resultLimit, collector);
        results.add(collector);
      }
      mergedResult = (TopEntriesCollector) manager.reduce(results);
    } finally {
      // Record query timing even when the query fails mid-way.
      stats.endQuery(start, mergedResult == null ? 0 : mergedResult.size());
    }
    stats.incNumberOfQueryExecuted();
    resultSender.lastResult(mergedResult);
  } catch (IOException | BucketNotFoundException | CacheClosedException | PrimaryBucketException e) {
    logger.debug("Exception during lucene query function", e);
    throw new InternalFunctionInvocationTargetException(e);
  }
}
Aggregations