use of org.apache.geode.internal.cache.execute.InternalRegionFunctionContext in project geode by apache.
the class AbstractPartitionedRepositoryManager method getRepositories.
@Override
public Collection<IndexRepository> getRepositories(RegionFunctionContext ctx)
    throws BucketNotFoundException {
  Region<Object, Object> region = ctx.getDataSet();
  Set<Integer> buckets = ((InternalRegionFunctionContext) ctx).getLocalBucketSet(region);
  ArrayList<IndexRepository> repos = new ArrayList<IndexRepository>(buckets.size());
  for (Integer bucketId : buckets) {
    BucketRegion userBucket = userRegion.getDataStore().getLocalBucketById(bucketId);
    if (userBucket == null) {
      throw new BucketNotFoundException(
          "User bucket was not found for region " + region + " bucket id " + bucketId);
    } else {
      repos.add(getRepository(userBucket.getId()));
    }
  }
  return repos;
}
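For context, a manager like this is typically reached from inside a data-aware function: when a function is executed over a partitioned region via FunctionService.onRegion(...), the FunctionContext passed to execute() is an InternalRegionFunctionContext, so it can be handed straight to getRepositories. Below is a minimal, hypothetical sketch of such a caller; the function name, the repoManager field, and the returned count are assumptions rather than part of the snippet above, and imports are omitted as in the snippets on this page.

public class RepositoryCountFunction implements Function {

  // Hypothetical: the repository manager is assumed to be supplied when the function is created.
  private final AbstractPartitionedRepositoryManager repoManager;

  public RepositoryCountFunction(AbstractPartitionedRepositoryManager repoManager) {
    this.repoManager = repoManager;
  }

  @Override
  public void execute(FunctionContext context) {
    try {
      // On a partitioned region the runtime supplies an InternalRegionFunctionContext,
      // which carries the bucket ids this member must cover for this execution.
      RegionFunctionContext ctx = (RegionFunctionContext) context;
      Collection<IndexRepository> repos = repoManager.getRepositories(ctx);
      context.getResultSender().lastResult(repos.size());
    } catch (BucketNotFoundException e) {
      throw new FunctionException(e);
    }
  }

  @Override
  public String getId() {
    return "RepositoryCountFunction";
  }

  @Override
  public boolean hasResult() {
    return true;
  }

  @Override
  public boolean optimizeForWrite() {
    return true;
  }

  @Override
  public boolean isHA() {
    return false;
  }
}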
use of org.apache.geode.internal.cache.execute.InternalRegionFunctionContext in project geode by apache.
the class QueryFunction method execute.
@Override
public void execute(FunctionContext context) {
  try {
    String[] args = (String[]) context.getArguments();
    String queryString = args[0];
    String bucketSet = args[1];
    InternalRegionFunctionContext irfc = (InternalRegionFunctionContext) context;
    LocalRegion localRegion = (LocalRegion) irfc.getDataSet();
    boolean partitioned = localRegion.getDataPolicy().withPartitioning();
    Query query = CacheFactory.getAnyInstance().getQueryService().newQuery(queryString);
    Object result =
        partitioned ? query.execute((InternalRegionFunctionContext) context) : query.execute();
    ResultSender<Object> sender = context.getResultSender();
    HeapDataOutputStream buf = new HeapDataOutputStream(CHUNK_SIZE, null);
    Iterator<Object> iter = ((SelectResults) result).asList().iterator();
    while (iter.hasNext()) {
      Object row = iter.next();
      DataSerializer.writeObject(row, buf);
      if (buf.size() > CHUNK_SIZE) {
        sender.sendResult(buf.toByteArray());
        logger.debug("OQL query=" + queryString + " bucket set=" + bucketSet
            + " sendResult(), data size=" + buf.size());
        buf.reset();
      }
    }
    sender.lastResult(buf.toByteArray());
    logger.debug("OQL query=" + queryString + " bucket set=" + bucketSet
        + " lastResult(), data size=" + buf.size());
    buf.reset();
  } catch (Exception e) {
    throw new FunctionException(e);
  }
}
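On the calling side, the chunks that QueryFunction streams back arrive as byte arrays that have to be deserialized again. A hedged caller-side sketch follows; the runQuery helper, the bucket-set label, direct construction of QueryFunction (in practice such functions are usually registered and invoked by id), and the assumption that each chunk holds rows written back to back with DataSerializer are illustrative. Execution.setArguments is assumed to be available (older releases use withArgs), and imports are omitted as in the snippets on this page.

public static List<Object> runQuery(Region<?, ?> region, String oql) throws Exception {
  Execution exec = FunctionService.onRegion(region)
      // args[0] = OQL string, args[1] = label used only for logging in QueryFunction
      .setArguments(new String[] {oql, "local-buckets"});
  ResultCollector<?, ?> rc = exec.execute(new QueryFunction());
  List<?> chunks = (List<?>) rc.getResult();

  List<Object> rows = new ArrayList<>();
  for (Object chunk : chunks) {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream((byte[]) chunk));
    // Assumption: each chunk contains zero or more rows written consecutively
    // with DataSerializer.writeObject, so read until the chunk is exhausted.
    while (in.available() > 0) {
      rows.add(DataSerializer.readObject(in));
    }
  }
  return rows;
}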
use of org.apache.geode.internal.cache.execute.InternalRegionFunctionContext in project geode by apache.
the class RetrieveRegionFunction method execute.
@Override
public void execute(FunctionContext context) {
  String[] args = (String[]) context.getArguments();
  String where = args[0];
  String taskDesc = args[1];
  InternalRegionFunctionContext irfc = (InternalRegionFunctionContext) context;
  LocalRegion localRegion = (LocalRegion) irfc.getDataSet();
  boolean partitioned = localRegion.getDataPolicy().withPartitioning();
  if (where.trim().isEmpty()) {
    retrieveFullRegion(irfc, partitioned, taskDesc);
  } else {
    retrieveRegionWithWhereClause(irfc, localRegion, partitioned, where, taskDesc);
  }
}
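The two branches are not shown in this snippet. As a rough, hypothetical illustration of the full-region branch only: for a partitioned region, each member should return just the entries of the buckets in its context, which PartitionRegionHelper exposes as a local-data view. The method body, the key/value pair shape, and the decision to buffer everything into one result are assumptions, not the actual implementation.

private void retrieveFullRegion(InternalRegionFunctionContext context, boolean partitioned,
    String taskDesc) {
  // For a partitioned region, read only the buckets assigned to this execution;
  // for a replicated region, the whole data set is already local.
  Region<?, ?> data = partitioned
      ? PartitionRegionHelper.getLocalDataForContext(context)
      : context.getDataSet();
  List<Object[]> pairs = new ArrayList<>();
  for (Map.Entry<?, ?> entry : data.entrySet()) {
    pairs.add(new Object[] {entry.getKey(), entry.getValue()});
  }
  context.getResultSender().lastResult(pairs);
}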
use of org.apache.geode.internal.cache.execute.InternalRegionFunctionContext in project geode by apache.
the class PartitionedRepositoryManagerJUnitTest method getByRegion.
@Test
public void getByRegion() throws BucketNotFoundException {
  setUpMockBucket(0);
  setUpMockBucket(1);
  Set<Integer> buckets = new LinkedHashSet<Integer>(Arrays.asList(0, 1));
  InternalRegionFunctionContext ctx = Mockito.mock(InternalRegionFunctionContext.class);
  when(ctx.getLocalBucketSet(any())).thenReturn(buckets);
  Collection<IndexRepository> repos = repoManager.getRepositories(ctx);
  assertEquals(2, repos.size());
  Iterator<IndexRepository> itr = repos.iterator();
  IndexRepositoryImpl repo0 = (IndexRepositoryImpl) itr.next();
  IndexRepositoryImpl repo1 = (IndexRepositoryImpl) itr.next();
  assertNotNull(repo0);
  assertNotNull(repo1);
  assertNotEquals(repo0, repo1);
  checkRepository(repo0, 0);
  checkRepository(repo1, 1);
}
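A natural companion to this test is the missing-bucket path of getRepositories: if the context reports a bucket id for which the data store has no local BucketRegion, the call must fail rather than return a partial collection. The sketch below is hypothetical; it reuses the repoManager and setUpMockBucket fixtures assumed by the test above and relies on the un-stubbed data-store mock returning null for bucket 1, which is Mockito's default.

@Test
public void getByRegionMissingBucketThrows() throws Exception {
  setUpMockBucket(0);
  // Bucket 1 is deliberately left un-stubbed, so getLocalBucketById(1) returns null.
  Set<Integer> buckets = new LinkedHashSet<Integer>(Arrays.asList(0, 1));
  InternalRegionFunctionContext ctx = Mockito.mock(InternalRegionFunctionContext.class);
  when(ctx.getLocalBucketSet(any())).thenReturn(buckets);
  try {
    repoManager.getRepositories(ctx);
    fail("expected BucketNotFoundException for the missing bucket");
  } catch (BucketNotFoundException expected) {
    // expected: bucket 1 has no local BucketRegion
  }
}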