use of org.apache.geode.internal.cache.functions.TestFunction in project geode by apache.
the class PRFunctionExecutionTimeOutDUnitTest method testRemoteSingleKeyExecution_byName.
/**
 * Test remote execution by a pure accessor, then repeat the execution with a timeout and
 * multiple getResult() calls.
 */
@Test
public void testRemoteSingleKeyExecution_byName() throws Exception {
  final String rName = getUniqueName();
  Host host = Host.getHost(0);
  final VM accessor = host.getVM(2);
  final VM datastore = host.getVM(3);
  getCache();
  accessor.invoke(new SerializableCallable("Create PR") {
    public Object call() throws Exception {
      RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
      getCache().createRegion(rName, ra);
      return Boolean.TRUE;
    }
  });
  datastore.invoke(new SerializableCallable("Create PR with Function Factory") {
    public Object call() throws Exception {
      RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
      AttributesFactory raf = new AttributesFactory(ra);
      PartitionAttributesImpl pa = new PartitionAttributesImpl();
      pa.setAll(ra.getPartitionAttributes());
      raf.setPartitionAttributes(pa);
      getCache().createRegion(rName, raf.create());
      Function function = new TestFunction(true, TEST_FUNCTION_TIMEOUT);
      FunctionService.registerFunction(function);
      return Boolean.TRUE;
    }
  });
  Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {
    public Object call() throws Exception {
      PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
      final String testKey = "execKey";
      final Set testKeysSet = new HashSet();
      testKeysSet.add(testKey);
      DistributedSystem.setThreadsSocketPolicy(false);
      Function function = new TestFunction(true, TEST_FUNCTION_TIMEOUT);
      FunctionService.registerFunction(function);
      Execution dataSet = FunctionService.onRegion(pr);
      try {
        dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
      } catch (Exception expected) {
        // No data should cause exec to throw
        assertTrue(expected.getMessage().contains("No target node found for KEY = " + testKey));
      }
      pr.put(testKey, new Integer(1));
      ResultCollector rs1 =
          dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
      assertEquals(Boolean.TRUE, ((List) rs1.getResult()).get(0));
      try {
        rs1.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      ResultCollector rs2 =
          dataSet.withFilter(testKeysSet).setArguments(testKey).execute(function.getId());
      assertEquals(new Integer(1), ((List) rs2.getResult()).get(0));
      try {
        rs1.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      HashMap putData = new HashMap();
      putData.put(testKey + "1", new Integer(2));
      putData.put(testKey + "2", new Integer(3));
      ResultCollector rs3 =
          dataSet.withFilter(testKeysSet).setArguments(putData).execute(function.getId());
      assertEquals(Boolean.TRUE, ((List) rs3.getResult(4000, TimeUnit.MILLISECONDS)).get(0));
      try {
        rs1.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      assertEquals(new Integer(2), pr.get(testKey + "1"));
      assertEquals(new Integer(3), pr.get(testKey + "2"));
      ResultCollector rst1 =
          dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
      try {
        rst1.getResult(1000, TimeUnit.MILLISECONDS);
        fail("Did not get the expected timeout exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(),
            fe.getMessage().contains("All results not recieved in time provided."));
      }
      try {
        rst1.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      ResultCollector rst2 =
          dataSet.withFilter(testKeysSet).setArguments(testKey).execute(function.getId());
      try {
        rst2.getResult(1000, TimeUnit.MILLISECONDS);
        fail("Did not get the expected timeout exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(),
            fe.getMessage().contains("All results not recieved in time provided."));
      }
      try {
        rst2.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      HashMap putDataTimeOut = new HashMap();
      putDataTimeOut.put(testKey + "4", new Integer(4));
      putDataTimeOut.put(testKey + "5", new Integer(5));
      ResultCollector rst3 =
          dataSet.withFilter(testKeysSet).setArguments(putDataTimeOut).execute(function.getId());
      try {
        rst3.getResult(1000, TimeUnit.MILLISECONDS);
        fail("Did not get the expected timeout exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(),
            fe.getMessage().contains("All results not recieved in time provided."));
      }
      try {
        rst3.getResult();
        fail("Did not get the expected exception.");
      } catch (FunctionException fe) {
        assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
      }
      return Boolean.TRUE;
    }
  });
  assertEquals(Boolean.TRUE, o);
}
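For readers who want to exercise the same ResultCollector contract outside the DUnit harness, here is a minimal single-member sketch, not taken from the Geode sources: the cache settings, the region name "exampleRegion" and the EchoFunction class are illustrative assumptions. It shows the two behaviours the test asserts on: getResult(timeout, unit) bounds the wait for results, and a second getResult() on the same collector is rejected with "Result already collected".

import java.util.Collections;
import java.util.concurrent.TimeUnit;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class ResultCollectorContractSketch {

  /** Illustrative stand-in for TestFunction: returns a single Boolean result. */
  static class EchoFunction implements Function {
    @Override
    public void execute(FunctionContext context) {
      context.getResultSender().lastResult(Boolean.TRUE);
    }

    @Override
    public String getId() {
      return "EchoFunction";
    }

    @Override
    public boolean hasResult() {
      return true;
    }
  }

  public static void main(String[] args) throws Exception {
    Cache cache = new CacheFactory().set("mcast-port", "0").set("locators", "").create();
    Region<String, Integer> region =
        cache.<String, Integer>createRegionFactory(RegionShortcut.PARTITION).create("exampleRegion");
    region.put("execKey", 1); // the filter key must map to a hosted bucket

    Function function = new EchoFunction();
    FunctionService.registerFunction(function);
    ResultCollector collector = FunctionService.onRegion(region)
        .withFilter(Collections.singleton("execKey"))
        .setArguments(Boolean.TRUE)
        .execute(function.getId());

    // Bounded wait for the results, as in rst1.getResult(1000, TimeUnit.MILLISECONDS) above.
    System.out.println("Result: " + collector.getResult(4000, TimeUnit.MILLISECONDS));

    // A second getResult() on the same collector is rejected.
    try {
      collector.getResult();
    } catch (FunctionException alreadyCollected) {
      System.out.println(alreadyCollected.getMessage());
    }
    cache.close();
  }
}

In the test itself the results come from a remote datastore, and TEST_FUNCTION_TIMEOUT is presumably slow enough that the 1000 ms getResult calls genuinely time out.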
use of org.apache.geode.internal.cache.functions.TestFunction in project geode by apache.
the class PRFunctionExecutionDUnitTest method testExecutionOnAllNodes_byName.
/**
 * Ensure that the execution happens across the entire PR, i.e. on every datastore node.
 */
@Test
public void testExecutionOnAllNodes_byName() throws Exception {
  final String rName = getUniqueName();
  Host host = Host.getHost(0);
  final VM datastore0 = host.getVM(0);
  final VM datastore1 = host.getVM(1);
  final VM datastore2 = host.getVM(2);
  final VM datastore3 = host.getVM(3);
  getCache();
  SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {
    public Object call() throws Exception {
      RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
      AttributesFactory raf = new AttributesFactory(ra);
      PartitionAttributesImpl pa = new PartitionAttributesImpl();
      pa.setAll(ra.getPartitionAttributes());
      pa.setTotalNumBuckets(17);
      raf.setPartitionAttributes(pa);
      getCache().createRegion(rName, raf.create());
      Function function = new TestFunction(true, TestFunction.TEST_FUNCTION2);
      FunctionService.registerFunction(function);
      return Boolean.TRUE;
    }
  };
  datastore0.invoke(dataStoreCreate);
  datastore1.invoke(dataStoreCreate);
  datastore2.invoke(dataStoreCreate);
  datastore3.invoke(dataStoreCreate);
  Object o = datastore3.invoke(new SerializableCallable("Create data, invoke executable") {
    public Object call() throws Exception {
      PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
      DistributedSystem.setThreadsSocketPolicy(false);
      final HashSet testKeys = new HashSet();
      for (int i = (pr.getTotalNumberOfBuckets() * 3); i > 0; i--) {
        testKeys.add("execKey-" + i);
      }
      int j = 0;
      for (Iterator i = testKeys.iterator(); i.hasNext();) {
        Integer val = new Integer(j++);
        pr.put(i.next(), val);
      }
      // Assert there is data in each bucket
      for (int bid = 0; bid < pr.getTotalNumberOfBuckets(); bid++) {
        assertTrue(pr.getBucketKeys(bid).size() > 0);
      }
      Function function = new TestFunction(true, TestFunction.TEST_FUNCTION2);
      FunctionService.registerFunction(function);
      Execution dataSet = FunctionService.onRegion(pr);
      ResultCollector rc1 = dataSet.setArguments(Boolean.TRUE).execute(function.getId());
      List l = ((List) rc1.getResult());
      LogWriterUtils.getLogWriter()
          .info("PRFunctionExecutionDUnitTest#testExecutionOnAllNodes_byName : Result size :"
              + l.size() + " Result : " + l);
      assertEquals(4, l.size());
      for (int i = 0; i < 4; i++) {
        assertEquals(Boolean.TRUE, l.iterator().next());
      }
      return Boolean.TRUE;
    }
  });
  assertEquals(Boolean.TRUE, o);
}
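The size-4 assertion relies on TEST_FUNCTION2 returning one entry per datastore when the execution is routed to the whole region (no filter). The actual TestFunction implementation is not shown on this page; as a hedged illustration, a function with that per-member behaviour could look roughly like the following, with the class name and id being assumptions:

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;

public class OnePerMemberFunction implements Function {

  @Override
  public void execute(FunctionContext context) {
    // The test above supplies no filter, so every member that hosts data for the
    // partitioned region executes this once. One result per executing member means
    // the caller's default ResultCollector aggregates a List of four Booleans when
    // four datastores host the region.
    context.getResultSender().lastResult(Boolean.TRUE);
  }

  @Override
  public String getId() {
    return "OnePerMemberFunction"; // illustrative id
  }

  @Override
  public boolean hasResult() {
    return true;
  }
}

As in the dataStoreCreate callable above, it would be registered on every member with FunctionService.registerFunction(new OnePerMemberFunction()) before being executed by id.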
use of org.apache.geode.internal.cache.functions.TestFunction in project geode by apache.
the class PRFunctionExecutionDUnitTest method testFunctionExecution.
/**
 * Test to validate that function execution succeeds on a PR created in a loner distributed
 * system.
 */
@Test
public void testFunctionExecution() throws Exception {
  final String rName = getUniqueName();
  Host host = Host.getHost(0);
  final VM datastore = host.getVM(3);
  datastore.invoke(new SerializableCallable("Create PR with Function Factory") {
    public Object call() throws Exception {
      Properties props = new Properties();
      props.setProperty(MCAST_PORT, "0");
      props.setProperty(LOCATORS, "");
      DistributedSystem ds = getSystem(props);
      assertNotNull(ds);
      ds.disconnect();
      ds = getSystem(props);
      cache = CacheFactory.create(ds);
      assertNotNull(cache);
      RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
      AttributesFactory raf = new AttributesFactory(ra);
      PartitionAttributesImpl pa = new PartitionAttributesImpl();
      pa.setAll(ra.getPartitionAttributes());
      raf.setPartitionAttributes(pa);
      Region pr = cache.createRegion(rName, raf.create());
      final String testKey = "execKey";
      final Set testKeysSet = new HashSet();
      testKeysSet.add(testKey);
      Function function = new TestFunction(true, TestFunction.TEST_FUNCTION1);
      FunctionService.registerFunction(function);
      Execution dataSet = FunctionService.onRegion(pr);
      ResultCollector result =
          dataSet.setArguments(Boolean.TRUE).withFilter(testKeysSet).execute(function);
      System.out.println("KBKBKB : Result I got : " + result.getResult());
      return Boolean.TRUE;
    }
  });
}
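The interesting part of this test is the loner configuration: MCAST_PORT set to "0" together with an empty LOCATORS property yields a standalone distributed system, and region function execution still works on that single member. Below is a rough standalone sketch of the same setup, assuming the illustrative OnePerMemberFunction class from the earlier sketch and a hypothetical region name:

import java.util.Collections;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class LonerFunctionExecutionSketch {
  public static void main(String[] args) throws Exception {
    // mcast-port 0 and empty locators: a loner (standalone) distributed system,
    // mirroring the MCAST_PORT/LOCATORS properties used in the test.
    Cache cache = new CacheFactory()
        .set("mcast-port", "0")
        .set("locators", "")
        .create();
    Region<String, Integer> pr =
        cache.<String, Integer>createRegionFactory(RegionShortcut.PARTITION).create("lonerRegion");
    pr.put("execKey", 1);

    Function function = new OnePerMemberFunction(); // illustrative class from the sketch above
    FunctionService.registerFunction(function);
    Execution execution = FunctionService.onRegion(pr)
        .withFilter(Collections.singleton("execKey"))
        .setArguments(Boolean.TRUE);
    ResultCollector rc = execution.execute(function);
    System.out.println("Result: " + rc.getResult());
    cache.close();
  }
}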
use of org.apache.geode.internal.cache.functions.TestFunction in project geode by apache.
the class PRFunctionExecutionDUnitTest method executeFunction.
public static Object executeFunction() {
  PartitionedRegion pr = (PartitionedRegion) cache.getRegion(regionName);
  final HashSet testKeysSet = new HashSet();
  for (int i = (pr.getTotalNumberOfBuckets() * 2); i > 0; i--) {
    testKeysSet.add("execKey-" + i);
  }
  Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA);
  FunctionService.registerFunction(function);
  Execution dataSet = FunctionService.onRegion(pr);
  ResultCollector rs =
      dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
  List l = ((List) rs.getResult());
  return l;
}
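TEST_FUNCTION_HA is created here with its first constructor argument (hasResult) set to true, which matters for a highly available function: when isHA() is true, Geode may retry the execution on a surviving member, and it requires hasResult() to also be true (see the message asserted in the next test). A hedged sketch of a function declaring that combination, not the real TestFunction, might look like this:

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;

public class HighlyAvailableFunction implements Function {

  @Override
  public void execute(FunctionContext context) {
    // Per-member work goes here; report completion with a single result so the
    // caller's ResultCollector can aggregate (and re-collect after a retry).
    context.getResultSender().lastResult(Boolean.TRUE);
  }

  @Override
  public String getId() {
    return "HighlyAvailableFunction"; // illustrative id
  }

  @Override
  public boolean hasResult() {
    return true; // required: isHA() == true demands hasResult() == true
  }

  @Override
  public boolean isHA() {
    return true; // allow the execution to be retried on surviving members
  }
}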
use of org.apache.geode.internal.cache.functions.TestFunction in project geode by apache.
the class PRFunctionExecutionDUnitTest method testHAFunctionExecution.
@Test
public void testHAFunctionExecution() throws Exception {
  final String rName = getUniqueName();
  Host host = Host.getHost(0);
  final VM datastore = host.getVM(3);
  getCache();
  datastore.invoke(new SerializableCallable("Create PR with Function Factory") {
    public Object call() throws Exception {
      RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
      AttributesFactory raf = new AttributesFactory(ra);
      PartitionAttributesImpl pa = new PartitionAttributesImpl();
      pa.setAll(ra.getPartitionAttributes());
      raf.setPartitionAttributes(pa);
      Region pr = getCache().createRegion(rName, raf.create());
      final String testKey = "execKey";
      final Set testKeysSet = new HashSet();
      testKeysSet.add(testKey);
      Function function = new TestFunction(false, TestFunction.TEST_FUNCTION10);
      try {
        FunctionService.registerFunction(function);
        fail("It should have failed with Function attributes don't match");
      } catch (Exception expected) {
        expected.printStackTrace();
        assertTrue(expected.getMessage()
            .contains("For Functions with isHA true, hasResult must also be true."));
      }
      try {
        Execution dataSet = FunctionService.onRegion(pr);
        dataSet.withFilter(testKeysSet).setArguments(testKey).execute(function);
        fail("It should have failed with Function attributes don't match");
      } catch (Exception expected) {
        expected.printStackTrace();
        assertTrue(expected.getMessage()
            .contains("For Functions with isHA true, hasResult must also be true."));
      }
      return Boolean.TRUE;
    }
  });
}
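Conversely, TEST_FUNCTION10 is constructed here with hasResult set to false while its isHA() is true, and both registering and executing it are rejected with "For Functions with isHA true, hasResult must also be true." The same rejection is expected for any function declaring that combination; a minimal illustrative sketch with a hypothetical class name:

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionService;

public class InvalidHaFunction implements Function {

  @Override
  public void execute(FunctionContext context) {
    // Never reached in this sketch; registration is expected to fail first.
  }

  @Override
  public boolean hasResult() {
    return false; // invalid together with isHA() == true
  }

  @Override
  public boolean isHA() {
    return true;
  }

  public static void main(String[] args) {
    try {
      FunctionService.registerFunction(new InvalidHaFunction());
    } catch (Exception expected) {
      // Expected to contain "For Functions with isHA true, hasResult must also be true."
      System.out.println(expected.getMessage());
    }
  }
}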