Use of org.apache.geode.cache.execute.RegionFunctionContext in project geode by apache.
The class TestFunction, method execute2.
public void execute2(FunctionContext context) {
  if (context instanceof RegionFunctionContext) {
    RegionFunctionContext rfContext = (RegionFunctionContext) context;
    rfContext.getDataSet().getCache().getLogger()
        .info("Executing function : TestFunction2.execute " + rfContext);
    if (rfContext.getArguments() instanceof Boolean) {
      if (hasResult()) {
        rfContext.getResultSender().lastResult((Serializable) rfContext.getArguments());
      } else {
        // No result expected: loop until the region is destroyed (see Bug43513).
        rfContext.getDataSet().getCache().getLogger()
            .info("Executing function : TestFunction2.execute " + rfContext);
        while (!rfContext.getDataSet().isDestroyed()) {
          rfContext.getDataSet().getCache().getLogger().info("For Bug43513 ");
          try {
            Thread.sleep(100);
          } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            return;
          }
        }
      }
    } else if (rfContext.getArguments() instanceof String) {
      String key = (String) rfContext.getArguments();
      if (key.equals("TestingTimeOut")) {
        // PRFunctionExecutionDUnitTest#testRemoteMultiKeyExecution_timeout
        try {
          Thread.sleep(2000);
        } catch (InterruptedException e) {
          rfContext.getDataSet().getCache().getLogger()
              .warning("Got Exception : Thread Interrupted " + e);
        }
      }
      if (PartitionRegionHelper.isPartitionedRegion(rfContext.getDataSet())) {
        // For a partitioned region, read only the data hosted locally for this context.
        rfContext.getResultSender().lastResult(
            (Serializable) PartitionRegionHelper.getLocalDataForContext(rfContext).get(key));
      } else {
        rfContext.getResultSender().lastResult((Serializable) rfContext.getDataSet().get(key));
      }
    } else if (rfContext.getArguments() instanceof Set) {
      Set origKeys = (Set) rfContext.getArguments();
      ArrayList vals = new ArrayList();
      for (Object key : origKeys) {
        Object val = PartitionRegionHelper.getLocalDataForContext(rfContext).get(key);
        if (val != null) {
          vals.add(val);
        }
      }
      rfContext.getResultSender().lastResult(vals);
    } else if (rfContext.getArguments() instanceof HashMap) {
      HashMap putData = (HashMap) rfContext.getArguments();
      for (Iterator i = putData.entrySet().iterator(); i.hasNext();) {
        Map.Entry me = (Map.Entry) i.next();
        rfContext.getDataSet().put(me.getKey(), me.getValue());
      }
      rfContext.getResultSender().lastResult(Boolean.TRUE);
    } else {
      rfContext.getResultSender().lastResult(Boolean.FALSE);
    }
  } else {
    if (hasResult()) {
      context.getResultSender().lastResult(Boolean.FALSE);
    } else {
      DistributedSystem ds = InternalDistributedSystem.getAnyInstance();
      LogWriter logger = ds.getLogWriter();
      logger.info("Executing in TestFunction on Server : " + ds.getDistributedMember()
          + " with Context : " + context);
      while (ds.isConnected()) {
        logger.fine("Just executing function in infinite loop for Bug43513");
        try {
          Thread.sleep(250);
        } catch (InterruptedException e) {
          return;
        }
      }
    }
  }
}
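As a rough caller-side sketch of how a function like this is typically invoked so that it receives a RegionFunctionContext: the region name "TestRegion", the locator address, the function id "TestFunction2", and the use of setArguments (available in newer Geode releases; older ones use the deprecated withArgs) are all assumptions for illustration, not part of the snippet above.

// Hedged caller-side sketch: executes a registered function on a region so that
// execute2 receives a RegionFunctionContext. Names and addresses are assumptions.
import java.util.Collections;
import java.util.List;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class Execute2Caller {
  public static void main(String[] args) {
    ClientCache cache = new ClientCacheFactory().addPoolLocator("localhost", 10334).create();
    Region<String, String> region = cache
        .<String, String>createClientRegionFactory(ClientRegionShortcut.PROXY)
        .create("TestRegion");

    // A Boolean argument makes execute2 echo it back through the ResultSender.
    Execution execution = FunctionService.onRegion(region)
        .withFilter(Collections.singleton("key1"))
        .setArguments(Boolean.TRUE);
    ResultCollector<?, ?> rc = execution.execute("TestFunction2");
    List<?> results = (List<?>) rc.getResult();
    System.out.println("Function results: " + results);
    cache.close();
  }
}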
Use of org.apache.geode.cache.execute.RegionFunctionContext in project geode by apache.
The class TestFunction, method executeHA.
public void executeHA(FunctionContext context) {
  RegionFunctionContext rcontext = (RegionFunctionContext) context;
  Region region = rcontext.getDataSet();
  region.getCache().getLogger().fine("executeHA#execute( " + rcontext + " )");
  // The criterion never completes, so this simply waits out the full 10 seconds
  // before sending the result, simulating a slow function for the HA tests.
  WaitCriterion wc = new WaitCriterion() {

    String excuse;

    public boolean done() {
      return false;
    }

    public String description() {
      return excuse;
    }
  };
  Wait.waitForCriterion(wc, 10000, 500, false);
  rcontext.getResultSender().lastResult((Serializable) rcontext.getArguments());
}
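For context, here is a minimal sketch of how a function is declared HA so that Geode re-executes it on another member if the executing member fails mid-run; the class EchoHAFunction and its id are illustrative, not the actual TestFunction source.

// Minimal sketch of an HA-enabled function declaration (illustrative names).
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.RegionFunctionContext;

public class EchoHAFunction implements Function {

  @Override
  public void execute(FunctionContext context) {
    RegionFunctionContext rfc = (RegionFunctionContext) context;
    // Echo the arguments back, as executeHA does after its simulated delay.
    rfc.getResultSender().lastResult(rfc.getArguments());
  }

  @Override
  public boolean hasResult() {
    // HA functions must return a result so the caller can detect failures and retry.
    return true;
  }

  @Override
  public boolean isHA() {
    // Enables re-execution on another member when the executing member goes down.
    return true;
  }

  @Override
  public boolean optimizeForWrite() {
    return false;
  }

  @Override
  public String getId() {
    return "EchoHAFunction";
  }
}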
Use of org.apache.geode.cache.execute.RegionFunctionContext in project geode by apache.
The class TestFunction, method executeHAAndNonHAOnRegion.
public void executeHAAndNonHAOnRegion(FunctionContext context) {
  List<CacheServer> servers = CacheFactory.getAnyInstance().getCacheServers();
  ArrayList<String> args = (ArrayList<String>) context.getArguments();
  RegionFunctionContext rfContext = (RegionFunctionContext) context;
  rfContext.getDataSet().getCache().getLogger()
      .info("Executing function : executeHAAndNonHAOnRegion " + rfContext);
  Region r = CacheFactory.getAnyInstance().getRegion(args.get(0));
  String testName = args.get(1);
  Integer numTimesStopped = (Integer) r.get("stopped");
  Integer numTimesSentResult = (Integer) r.get("sentresult");
  if (context.isPossibleDuplicate()) {
    // Re-executed after a failover: decide whether to send a result or stop another server.
    if (testName.equals("regionExecutionHATwoServerDown")) {
      if ((Integer) r.get("stopped") == 2) {
        if (numTimesSentResult == null) {
          r.put("sentresult", 1);
        } else {
          r.put("sentresult", ++numTimesSentResult);
        }
        context.getResultSender().lastResult(args.get(0));
      } else {
        r.put("stopped", ++numTimesStopped);
        for (CacheServer s : servers) {
          if (((CacheServerImpl) s).getSystem().getDistributedMember()
              .equals(((GemFireCacheImpl) CacheFactory.getAnyInstance()).getMyId())) {
            s.stop();
            DistributedSystem ds = InternalDistributedSystem.getAnyInstance();
            ds.disconnect();
          }
        }
      }
    } else if (testName.equals("regionExecutionHAOneServerDown")) {
      if (numTimesSentResult == null) {
        r.put("sentresult", 1);
      } else {
        r.put("sentresult", ++numTimesSentResult);
      }
      context.getResultSender().lastResult(args.get(0));
    } else {
      context.getResultSender().lastResult(args.get(0));
    }
  } else {
    // First execution: record the stop count, shut down the local server, then send a result.
    if (numTimesStopped == null) {
      r.put("stopped", 1);
    } else {
      r.put("stopped", ++numTimesStopped);
    }
    for (CacheServer s : servers) {
      if (((CacheServerImpl) s).getSystem().getDistributedMember()
          .equals(((GemFireCacheImpl) CacheFactory.getAnyInstance()).getMyId())) {
        s.stop();
        DistributedSystem ds = InternalDistributedSystem.getAnyInstance();
        ds.disconnect();
      }
    }
    context.getResultSender().lastResult(args.get(0));
  }
}
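A hedged sketch of the caller side for this function, under the assumption that it is registered under the id "executeHAAndNonHAOnRegion" and that a separate bookkeeping region named "MetaDataRegion" holds the "stopped"/"sentresult" counters; both names are illustrative.

// Hedged caller-side sketch: the function expects a two-element argument list,
// [0] = name of a bookkeeping region, [1] = test name it understands.
import java.util.ArrayList;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class HAOnRegionCaller {
  public static Object callOnRegion(Region<?, ?> dataRegion) {
    ArrayList<String> args = new ArrayList<>();
    args.add("MetaDataRegion");                 // args.get(0): region tracking "stopped"/"sentresult"
    args.add("regionExecutionHAOneServerDown"); // args.get(1): test name checked by the function

    Execution execution = FunctionService.onRegion(dataRegion).setArguments(args);
    ResultCollector<?, ?> rc = execution.execute("executeHAAndNonHAOnRegion");
    return rc.getResult();
  }
}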
Use of org.apache.geode.cache.execute.RegionFunctionContext in project geode by apache.
The class TestFunction, method executeResultSender.
private void executeResultSender(FunctionContext context) {
  ResultSender resultSender = context.getResultSender();
  if (context instanceof RegionFunctionContext) {
    RegionFunctionContext rfContext = (RegionFunctionContext) context;
    rfContext.getDataSet().getCache().getLogger()
        .info("Executing function : TestFunctionexecuteResultSender.execute " + rfContext);
    if (rfContext.getArguments() instanceof Boolean) {
      if (this.hasResult()) {
        resultSender.lastResult((Serializable) rfContext.getArguments());
      }
    } else if (rfContext.getArguments() instanceof Set) {
      // Stream one result per key with sendResult(), then close the stream with lastResult().
      Set origKeys = (Set) rfContext.getArguments();
      Object[] objectArray = origKeys.toArray();
      int size = objectArray.length;
      int i = 0;
      for (; i < (size - 1); i++) {
        Object val = PartitionRegionHelper.getLocalDataForContext(rfContext).get(objectArray[i]);
        if (val != null) {
          resultSender.sendResult((Serializable) val);
        }
      }
      resultSender.lastResult((Serializable) objectArray[i]);
    } else {
      resultSender.lastResult(Boolean.FALSE);
    }
  } else {
    resultSender.lastResult(Boolean.FALSE);
  }
}
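To show how the streamed sendResult()/lastResult() chunks are consumed on the calling side, here is a simplified sketch of a custom ResultCollector; the class name is illustrative, and a production collector would block in getResult() until endResults() is called rather than returning immediately.

// Simplified sketch of a custom ResultCollector for streamed function results.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;

public class StreamingResultCollector implements ResultCollector<Object, List<Object>> {

  private final List<Object> results = new ArrayList<>();

  @Override
  public synchronized void addResult(DistributedMember member, Object singleResult) {
    // Called once per sendResult()/lastResult() chunk from the executing members.
    results.add(singleResult);
  }

  @Override
  public synchronized List<Object> getResult() throws FunctionException {
    // Simplification: a real collector would wait here until endResults() has fired.
    return results;
  }

  @Override
  public synchronized List<Object> getResult(long timeout, TimeUnit unit)
      throws FunctionException, InterruptedException {
    return results;
  }

  @Override
  public void endResults() {
    // Invoked after the last member has sent lastResult(); nothing extra to do in this sketch.
  }

  @Override
  public synchronized void clearResults() {
    results.clear();
  }
}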
Use of org.apache.geode.cache.execute.RegionFunctionContext in project geode by apache.
The class TestFunction, method executeTimeOut.
public void executeTimeOut(FunctionContext context) {
  try {
    synchronized (this) {
      this.wait(2000);
    }
  } catch (InterruptedException e) {
    // Ignored: the initial delay is best-effort.
  }
  if (context instanceof RegionFunctionContext) {
    RegionFunctionContext rfContext = (RegionFunctionContext) context;
    rfContext.getDataSet().getCache().getLogger()
        .info("Executing function : TestFunction.execute " + rfContext);
    if (rfContext.getArguments() instanceof Boolean) {
      rfContext.getResultSender().lastResult((Serializable) rfContext.getArguments());
    } else if (rfContext.getArguments() instanceof String) {
      String key = (String) rfContext.getArguments();
      if (key.equals("TestingTimeOut")) {
        // PRFunctionExecutionDUnitTest#testRemoteMultiKeyExecution_timeout
        try {
          synchronized (this) {
            this.wait(2000);
          }
        } catch (InterruptedException e) {
          rfContext.getDataSet().getCache().getLogger()
              .warning("Got Exception : Thread Interrupted " + e);
        }
      }
      try {
        synchronized (this) {
          this.wait(2000);
        }
      } catch (InterruptedException e) {
        rfContext.getDataSet().getCache().getLogger()
            .warning("Got Exception : Thread Interrupted " + e);
      }
      if (PartitionRegionHelper.isPartitionedRegion(rfContext.getDataSet())) {
        // For a partitioned region, read only the data hosted locally for this context.
        rfContext.getResultSender().lastResult(
            (Serializable) PartitionRegionHelper.getLocalDataForContext(rfContext).get(key));
      } else {
        rfContext.getResultSender().lastResult((Serializable) rfContext.getDataSet().get(key));
      }
    } else if (rfContext.getArguments() instanceof Set) {
      Set origKeys = (Set) rfContext.getArguments();
      ArrayList vals = new ArrayList();
      for (Iterator i = origKeys.iterator(); i.hasNext();) {
        Object val = PartitionRegionHelper.getLocalDataForContext(rfContext).get(i.next());
        if (val != null) {
          vals.add(val);
        }
      }
      rfContext.getResultSender().lastResult(vals);
    } else if (rfContext.getArguments() instanceof HashMap) {
      HashMap putData = (HashMap) rfContext.getArguments();
      for (Iterator i = putData.entrySet().iterator(); i.hasNext();) {
        Map.Entry me = (Map.Entry) i.next();
        rfContext.getDataSet().put(me.getKey(), me.getValue());
      }
      rfContext.getResultSender().lastResult(Boolean.TRUE);
    } else {
      rfContext.getResultSender().lastResult(Boolean.FALSE);
    }
  } else {
    context.getResultSender().lastResult(Boolean.FALSE);
  }
}
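A hedged caller-side sketch of collecting the result with a timeout, which is the scenario this deliberately slow function exercises; the function id "executeTimeOut" and the 5-second timeout are assumptions for illustration.

// Hedged caller-side sketch: collect function results with a bounded wait.
import java.util.concurrent.TimeUnit;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class TimeoutCaller {
  public static Object executeWithTimeout(Region<?, ?> region) throws InterruptedException {
    ResultCollector<?, ?> rc = FunctionService.onRegion(region)
        .setArguments("TestingTimeOut")
        .execute("executeTimeOut");
    // Wait at most 5 seconds; the function above sleeps long enough for the timeout to matter.
    return rc.getResult(5, TimeUnit.SECONDS);
  }
}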