
Example 6 with PartitionAttributesImpl

use of org.apache.geode.internal.cache.PartitionAttributesImpl in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteMultiKeyExecution_byInstance.

/**
   * Test multi-key remote execution by a pure accessor which doesn't have the function factory
   * present.
   */
@Test
public void testRemoteMultiKeyExecution_byInstance() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(3);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    datastore2.invoke(dataStoreCreate);
    Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final HashSet testKeysSet = new HashSet();
            for (int i = (pr.getTotalNumberOfBuckets() * 2); i > 0; i--) {
                testKeysSet.add("execKey-" + i);
            }
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            try {
                dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function);
            } catch (Exception expected) {
                // No data should cause exec to throw
                LogWriterUtils.getLogWriter().warning("Exception Occurred : " + expected.getMessage());
            // boolean expectedStr = expected.getMessage().startsWith("No target
            // node was found for routingKey");
            // assertTrue("Unexpected exception: " + expected, expectedStr);
            }
            int j = 0;
            HashSet origVals = new HashSet();
            for (Iterator i = testKeysSet.iterator(); i.hasNext(); ) {
                Integer val = new Integer(j++);
                origVals.add(val);
                pr.put(i.next(), val);
            }
            // DefaultResultCollector rc1 = new DefaultResultCollector();
            ResultCollector rc1 = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function);
            List l = ((List) rc1.getResult());
            assertEquals(3, l.size());
            for (Iterator i = l.iterator(); i.hasNext(); ) {
                assertEquals(Boolean.TRUE, i.next());
            }
            // DefaultResultCollector rc2 = new DefaultResultCollector();
            ResultCollector rc2 = dataSet.withFilter(testKeysSet).setArguments(testKeysSet).execute(function);
            List l2 = ((List) rc2.getResult());
            // assertIndexDetailsEquals(pr.getTotalNumberOfBuckets(), l2.size());
            assertEquals(3, l2.size());
            // assertIndexDetailsEquals(pr.getTotalNumberOfBuckets(), l.size());
            HashSet foundVals = new HashSet();
            for (Iterator i = l2.iterator(); i.hasNext(); ) {
                ArrayList subL = (ArrayList) i.next();
                assertTrue(subL.size() > 0);
                for (Iterator subI = subL.iterator(); subI.hasNext(); ) {
                    assertTrue(foundVals.add(subI.next()));
                }
            }
            assertEquals(origVals, foundVals);
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) ArrayList(java.util.ArrayList) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Iterator(java.util.Iterator) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
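
The examples above reach into the internal PartitionAttributesImpl only to copy (setAll) and adjust the attributes built by PartitionedRegionTestHelper before creating each data-store region. Outside internal test code, the same setup is normally expressed through the public PartitionAttributesFactory and a RegionFactory. The sketch below shows that public-API equivalent; the region name, property values, and key/value types are illustrative assumptions, not taken from the test.

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.PartitionAttributes;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;

public class PartitionedRegionSetup {

    public static Region<String, Integer> createDataStoreRegion(Cache cache, String regionName) {
        // Public-API equivalent of the AttributesFactory/PartitionAttributesImpl combination above:
        // build PartitionAttributes with the factory, then attach them to a PARTITION region.
        PartitionAttributes<String, Integer> pa = new PartitionAttributesFactory<String, Integer>()
            // assumed to mirror createRegionAttrsForPR(0, 10): redundancy 0, local max memory 10 MB
            .setRedundantCopies(0)
            .setLocalMaxMemory(10)
            .create();
        return cache.<String, Integer>createRegionFactory(RegionShortcut.PARTITION)
            .setPartitionAttributes(pa)
            .create(regionName);
    }

    public static void main(String[] args) {
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        Region<String, Integer> region = createDataStoreRegion(cache, "exampleRegion");
        region.put("execKey-1", 1);
        System.out.println(region.get("execKey-1"));
        cache.close();
    }
}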

Example 7 with PartitionAttributesImpl

use of org.apache.geode.internal.cache.PartitionAttributesImpl in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteMultipleKeyExecution_byName_FunctionInvocationTargetException.

/**
   * Test remote execution by a pure accessor which doesn't have the function factory present. The
   * function throws a FunctionInvocationTargetException; since this is an HA case, the system
   * should retry the function execution. After the 5th attempt the function sends its last result,
   * which the test expects to be 5.
   */
@Test
public void testRemoteMultipleKeyExecution_byName_FunctionInvocationTargetException() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(3);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_REEXECUTE_EXCEPTION);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    datastore2.invoke(dataStoreCreate);
    Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final HashSet testKeysSet = new HashSet();
            for (int i = (pr.getTotalNumberOfBuckets() * 2); i > 0; i--) {
                testKeysSet.add("execKey-" + i);
            }
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_REEXECUTE_EXCEPTION);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            int j = 0;
            HashSet origVals = new HashSet();
            for (Iterator i = testKeysSet.iterator(); i.hasNext(); ) {
                Integer val = new Integer(j++);
                origVals.add(val);
                pr.put(i.next(), val);
            }
            try {
                ResultCollector rs = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
                List list = (List) rs.getResult();
                assertEquals(5, list.get(0));
            } catch (Throwable e) {
                e.printStackTrace();
                Assert.fail("This is not expected Exception", e);
            }
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Iterator(java.util.Iterator) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
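
The HA retry this test depends on is driven entirely by the function: it throws FunctionInvocationTargetException to signal a retriable failure and reports isHA() == true so the function service re-executes it. TEST_FUNCTION_REEXECUTE_EXCEPTION lives in Geode's internal TestFunction class; the class below is a hypothetical, simplified single-member stand-in that is consistent with the assertion that the collected value is 5 after retries. It is a sketch of the mechanism, not the actual test function.

import java.util.concurrent.atomic.AtomicInteger;

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;

public class RetryUntilFifthAttemptFunction implements Function {

    // Per-JVM attempt counter; the real test function tracks retries in a comparable way.
    private static final AtomicInteger ATTEMPTS = new AtomicInteger(0);

    @Override
    public void execute(FunctionContext context) {
        int attempt = ATTEMPTS.incrementAndGet();
        if (attempt < 5) {
            // A retriable failure: with isHA() == true the caller re-executes the function.
            throw new FunctionInvocationTargetException("simulated target failure, attempt " + attempt);
        }
        // On the final attempt, send the attempt count as the last (and only) result.
        context.getResultSender().lastResult(attempt);
    }

    @Override
    public String getId() {
        return "RetryUntilFifthAttemptFunction";
    }

    @Override
    public boolean hasResult() {
        return true;
    }

    @Override
    public boolean isHA() {
        return true;
    }

    @Override
    public boolean optimizeForWrite() {
        return false;
    }
}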

Example 8 with PartitionAttributesImpl

use of org.apache.geode.internal.cache.PartitionAttributesImpl in project geode by apache.

the class PRFunctionExecutionDUnitTest method testExecutionOnAllNodes_LocalReadPR.

/**
   * Ensure that the execution happens on the PR as a whole, with LocalReadPR as the LocalDataSet.
   */
@Test
public void testExecutionOnAllNodes_LocalReadPR() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    final VM datastore3 = host.getVM(3);
    getCache();
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            pa.setTotalNumBuckets(17);
            pa.setPartitionResolver(new CustomerIDPartitionResolver("CustomerIDPartitionResolver"));
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION3);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    datastore2.invoke(dataStoreCreate);
    datastore3.invoke(dataStoreCreate);
    Object o = datastore3.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            DistributedSystem.setThreadsSocketPolicy(false);
            final HashSet testKeys = new HashSet();
            // create customer entries and remember their keys so the function results can be checked later
            for (int i = 1; i <= 10; i++) {
                CustId custid = new CustId(i);
                Customer customer = new Customer("name" + i, "Address" + i);
                try {
                    pr.put(custid, customer);
                    assertNotNull(pr.get(custid));
                    assertEquals(customer, pr.get(custid));
                    testKeys.add(custid);
                } catch (Exception e) {
                    Assert.fail("putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ", e);
                }
                LogWriterUtils.getLogWriter().fine("Customer :- { " + custid + " : " + customer + " }");
            }
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION3);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            ResultCollector rc1 = dataSet.setArguments(testKeys).execute(function.getId());
            List l = ((List) rc1.getResult());
            assertEquals(4, l.size());
            ArrayList vals = new ArrayList();
            Iterator itr = l.iterator();
            for (int i = 0; i < 4; i++) {
                vals.addAll((ArrayList) itr.next());
            }
            assertEquals(testKeys.size(), vals.size());
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) Customer(org.apache.geode.internal.cache.execute.data.Customer) ArrayList(java.util.ArrayList) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) CustId(org.apache.geode.internal.cache.execute.data.CustId) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Iterator(java.util.Iterator) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
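
This example differs from the earlier ones by installing a CustomerIDPartitionResolver on the PartitionAttributesImpl, so routing is driven by customer identity rather than plain key hashing. The class below is a hedged sketch of such a resolver against the public PartitionResolver interface; the class name and routing rule are illustrative, not the actual CustomerIDPartitionResolver implementation.

import org.apache.geode.cache.EntryOperation;
import org.apache.geode.cache.PartitionResolver;

public class CustomerIdResolver implements PartitionResolver<Object, Object> {

    @Override
    public Object getRoutingObject(EntryOperation<Object, Object> opDetails) {
        // Assumption: keys are CustId-like objects. Returning the key as the routing object means
        // its hashCode() selects the bucket, keeping all data for one customer together.
        return opDetails.getKey();
    }

    @Override
    public String getName() {
        return "CustomerIdResolver";
    }

    @Override
    public void close() {
        // nothing to release
    }
}

Such a resolver is attached exactly where the test calls pa.setPartitionResolver(...), alongside pa.setTotalNumBuckets(17).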

Example 9 with PartitionAttributesImpl

use of org.apache.geode.internal.cache.PartitionAttributesImpl in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteMultiKeyExecution_byName.

/**
   * Test multi-key remote execution by a pure accessor which doesn't have the function factory
   * present. The default ResultCollector is used and the function is created with haveResults = true.
   */
@Test
public void testRemoteMultiKeyExecution_byName() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(3);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    datastore2.invoke(dataStoreCreate);
    Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final HashSet testKeysSet = new HashSet();
            for (int i = (pr.getTotalNumberOfBuckets() * 2); i > 0; i--) {
                testKeysSet.add("execKey-" + i);
            }
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            try {
                dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
            } catch (Exception expected) {
                assertTrue(expected.getMessage(), expected.getMessage().contains("No target node found for KEY"));
            }
            int j = 0;
            HashSet origVals = new HashSet();
            for (Iterator i = testKeysSet.iterator(); i.hasNext(); ) {
                Integer val = new Integer(j++);
                origVals.add(val);
                pr.put(i.next(), val);
            }
            ResultCollector rs = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
            List l = ((List) rs.getResult());
            assertEquals(3, l.size());
            for (Iterator i = l.iterator(); i.hasNext(); ) {
                assertEquals(Boolean.TRUE, i.next());
            }
            // DefaultResultCollector rc2 = new DefaultResultCollector();
            ResultCollector rc2 = dataSet.withFilter(testKeysSet).setArguments(testKeysSet).execute(function.getId());
            List l2 = ((List) rc2.getResult());
            assertEquals(3, l2.size());
            HashSet foundVals = new HashSet();
            for (Iterator i = l2.iterator(); i.hasNext(); ) {
                ArrayList subL = (ArrayList) i.next();
                assertTrue(subL.size() > 0);
                for (Iterator subI = subL.iterator(); subI.hasNext(); ) {
                    assertTrue(foundVals.add(subI.next()));
                }
            }
            assertEquals(origVals, foundVals);
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) ArrayList(java.util.ArrayList) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Iterator(java.util.Iterator) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
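
Example 9 runs the same TEST_FUNCTION2 as Example 6, but executes it by registered id (execute(function.getId())) instead of by instance, which is why both the accessor and every datastore call FunctionService.registerFunction first. The class below is a hypothetical, simplified stand-in for TEST_FUNCTION2 that matches what the assertions rely on: each of the three datastores returns one result, a Boolean argument echoes TRUE, and a Set argument returns that member's local values for the filtered keys. It illustrates the pattern and is not the real TestFunction code.

import java.util.ArrayList;
import java.util.Set;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.RegionFunctionContext;
import org.apache.geode.cache.partition.PartitionRegionHelper;

public class EchoOrCollectFunction implements Function {

    @Override
    public void execute(FunctionContext context) {
        RegionFunctionContext rctx = (RegionFunctionContext) context;
        if (rctx.getArguments() instanceof Set) {
            // Set argument: gather this member's local values for the keys routed to it.
            Region<Object, Object> localData = PartitionRegionHelper.getLocalDataForContext(rctx);
            ArrayList<Object> values = new ArrayList<>();
            for (Object key : rctx.getFilter()) {
                Object value = localData.get(key);
                if (value != null) {
                    values.add(value);
                }
            }
            rctx.getResultSender().lastResult(values);
        } else {
            // Boolean argument: simply acknowledge execution on this member.
            rctx.getResultSender().lastResult(Boolean.TRUE);
        }
    }

    @Override
    public String getId() {
        return "EchoOrCollectFunction";
    }

    @Override
    public boolean hasResult() {
        return true;
    }

    @Override
    public boolean isHA() {
        return false;
    }

    @Override
    public boolean optimizeForWrite() {
        return false;
    }
}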

Example 10 with PartitionAttributesImpl

use of org.apache.geode.internal.cache.PartitionAttributesImpl in project geode by apache.

the class PRFunctionExecutionTimeOutDUnitTest method testRemoteSingleKeyExecution_byName.

/**
   * Test remote execution by a pure accessor, then exercise it with a getResult() timeout and with
   * repeated getResult() calls on the same ResultCollector.
   */
@Test
public void testRemoteSingleKeyExecution_byName() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(2);
    final VM datastore = host.getVM(3);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    datastore.invoke(new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TEST_FUNCTION_TIMEOUT);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    });
    Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final String testKey = "execKey";
            final Set testKeysSet = new HashSet();
            testKeysSet.add(testKey);
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TEST_FUNCTION_TIMEOUT);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            try {
                dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
            } catch (Exception expected) {
                // No data should cause exec to throw
                assertTrue(expected.getMessage().contains("No target node found for KEY = " + testKey));
            }
            pr.put(testKey, new Integer(1));
            ResultCollector rs1 = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
            assertEquals(Boolean.TRUE, ((List) rs1.getResult()).get(0));
            try {
                rs1.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            ResultCollector rs2 = dataSet.withFilter(testKeysSet).setArguments(testKey).execute(function.getId());
            assertEquals(new Integer(1), ((List) rs2.getResult()).get(0));
            try {
                rs1.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            HashMap putData = new HashMap();
            putData.put(testKey + "1", new Integer(2));
            putData.put(testKey + "2", new Integer(3));
            ResultCollector rs3 = dataSet.withFilter(testKeysSet).setArguments(putData).execute(function.getId());
            assertEquals(Boolean.TRUE, ((List) rs3.getResult(4000, TimeUnit.MILLISECONDS)).get(0));
            try {
                rs1.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            assertEquals(new Integer(2), pr.get(testKey + "1"));
            assertEquals(new Integer(3), pr.get(testKey + "2"));
            ResultCollector rst1 = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
            try {
                rst1.getResult(1000, TimeUnit.MILLISECONDS);
                fail("Did not get the expected timeout exception.");
            } catch (FunctionException fe) {
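                // "recieved" is spelled as it appears in the exception message this test matches, so it is left unchanged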
                assertTrue(fe.getMessage(), fe.getMessage().contains("All results not recieved in time provided."));
            }
            try {
                rst1.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            ResultCollector rst2 = dataSet.withFilter(testKeysSet).setArguments(testKey).execute(function.getId());
            try {
                rst2.getResult(1000, TimeUnit.MILLISECONDS);
                fail("Did not get the expected timeout exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("All results not recieved in time provided."));
            }
            try {
                rst2.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            HashMap putDataTimeOut = new HashMap();
            putDataTimeOut.put(testKey + "4", new Integer(4));
            putDataTimeOut.put(testKey + "5", new Integer(5));
            ResultCollector rst3 = dataSet.withFilter(testKeysSet).setArguments(putDataTimeOut).execute(function.getId());
            try {
                rst3.getResult(1000, TimeUnit.MILLISECONDS);
                fail("Did not get the expected timeout exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("All results not recieved in time provided."));
            }
            try {
                rst3.getResult();
                fail("Did not get the expected exception.");
            } catch (FunctionException fe) {
                assertTrue(fe.getMessage(), fe.getMessage().contains("Result already collected"));
            }
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) HashMap(java.util.HashMap) FunctionException(org.apache.geode.cache.execute.FunctionException) Host(org.apache.geode.test.dunit.Host) FunctionException(org.apache.geode.cache.execute.FunctionException) IgnoredException(org.apache.geode.test.dunit.IgnoredException) Function(org.apache.geode.cache.execute.Function) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) ArrayList(java.util.ArrayList) List(java.util.List) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) FlakyTest(org.apache.geode.test.junit.categories.FlakyTest) Test(org.junit.Test) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest)
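
Two properties of ResultCollector drive all of the try/catch blocks above: getResult(timeout, unit) throws a FunctionException when the last result has not arrived in time, and a collector can be drained only once, so any further getResult() call fails with "Result already collected". The caller-side sketch below shows that contract; "slowFunction" is a hypothetical registered function id standing in for TEST_FUNCTION_TIMEOUT, and the region and key types are illustrative.

import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class TimedResultCollection {

    public static void collectWithTimeout(Region<String, Integer> region, Set<String> keys) {
        ResultCollector collector = FunctionService.onRegion(region)
            .withFilter(keys)
            .setArguments(Boolean.TRUE)
            .execute("slowFunction");
        try {
            // Throws FunctionException if the last result does not arrive within the timeout.
            List results = (List) collector.getResult(4000, TimeUnit.MILLISECONDS);
            System.out.println("collected: " + results);
        } catch (FunctionException timedOut) {
            System.out.println("timed out: " + timedOut.getMessage());
        }
        // Whether or not the call above succeeded, the collector is now spent: another
        // getResult() call fails with "Result already collected", as the assertions above check.
    }
}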

Aggregations

PartitionAttributesImpl (org.apache.geode.internal.cache.PartitionAttributesImpl): 64 usages
AttributesFactory (org.apache.geode.cache.AttributesFactory): 56 usages
RegionAttributes (org.apache.geode.cache.RegionAttributes): 55 usages
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion): 55 usages
Host (org.apache.geode.test.dunit.Host): 55 usages
VM (org.apache.geode.test.dunit.VM): 55 usages
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 55 usages
Test (org.junit.Test): 55 usages
HashSet (java.util.HashSet): 54 usages
SerializableCallable (org.apache.geode.test.dunit.SerializableCallable): 54 usages
Function (org.apache.geode.cache.execute.Function): 51 usages
TestFunction (org.apache.geode.internal.cache.functions.TestFunction): 51 usages
Execution (org.apache.geode.cache.execute.Execution): 50 usages
ArrayList (java.util.ArrayList): 49 usages
ResultCollector (org.apache.geode.cache.execute.ResultCollector): 49 usages
FunctionException (org.apache.geode.cache.execute.FunctionException): 46 usages
List (java.util.List): 42 usages
Iterator (java.util.Iterator): 37 usages
PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory): 37 usages
IgnoredException (org.apache.geode.test.dunit.IgnoredException): 36 usages