Use of org.apache.geode.spark.connector.internal.RegionMetadata in project geode by apache.
The execute method of the class RetrieveRegionMetadataFunction:
@Override
public void execute(FunctionContext context) {
  // Resolve the region this function was invoked on.
  LocalRegion region = (LocalRegion) ((InternalRegionFunctionContext) context).getDataSet();
  String regionPath = region.getFullPath();
  boolean isPartitioned = region.getDataPolicy().withPartitioning();
  String kTypeName = getTypeClassName(region.getAttributes().getKeyConstraint());
  String vTypeName = getTypeClassName(region.getAttributes().getValueConstraint());
  RegionMetadata metadata;
  if (!isPartitioned) {
    // Replicated/local region: no buckets and no server-to-bucket mapping.
    metadata = new RegionMetadata(regionPath, false, 0, null, kTypeName, vTypeName);
  } else {
    // Partitioned region: include the total bucket count and a map from
    // server locations to the set of bucket ids each server hosts.
    PartitionedRegion pregion = (PartitionedRegion) region;
    int totalBuckets = pregion.getAttributes().getPartitionAttributes().getTotalNumBuckets();
    Map<Integer, List<BucketServerLocation66>> bucketMap =
        pregion.getRegionAdvisor().getAllClientBucketProfiles();
    HashMap<ServerLocation, HashSet<Integer>> serverMap =
        bucketServerMap2ServerBucketSetMap(bucketMap);
    metadata = new RegionMetadata(regionPath, true, totalBuckets, serverMap, kTypeName, vTypeName);
  }
  // Send the single RegionMetadata result back to the caller.
  ResultSender<RegionMetadata> sender = context.getResultSender();
  sender.lastResult(metadata);
}
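For context, the sketch below shows how a client might invoke a region function like this one through Geode's FunctionService and read back the single RegionMetadata result. It is a minimal illustration, not the connector's actual invocation path: the locator address, the region name "exampleRegion", and the function id string are assumptions for the example.

// Minimal caller-side sketch. Assumptions: a locator is running on localhost:10334,
// the server hosts a region named "exampleRegion", and the function is registered
// on the servers under the (hypothetical) id used below.
import java.util.List;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class RetrieveRegionMetadataExample {
  public static void main(String[] args) {
    ClientCache cache = new ClientCacheFactory()
        .addPoolLocator("localhost", 10334)   // assumed locator host/port
        .create();
    Region<Object, Object> region = cache
        .createClientRegionFactory(ClientRegionShortcut.PROXY)
        .create("exampleRegion");             // assumed server-side region name

    // Execute the metadata function on the region; the lone result is the
    // RegionMetadata object built in execute() above.
    ResultCollector<?, ?> collector = FunctionService.onRegion(region)
        .execute("retrieve-region-metadata-fn"); // hypothetical registered function id
    List<?> results = (List<?>) collector.getResult();
    System.out.println("Region metadata: " + results.get(0));

    cache.close();
  }
}

Because execute() calls sender.lastResult(metadata) exactly once, the collector's result list contains a single element per invocation target, which is why results.get(0) is sufficient here.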