Example 86 with Function

use of org.apache.geode.cache.execute.Function in project geode by apache.

the class MiscellaneousCommands method executeAndBuildResult.

Result executeAndBuildResult(TabularResultData resultTable, Set<DistributedMember> dsMembers) {
    try {
        List<?> resultList = null;
        Function garbageCollectionFunction = new GarbageCollectionFunction();
        resultList = (List<?>) CliUtil.executeFunction(garbageCollectionFunction, null, dsMembers).getResult();
        for (int i = 0; i < resultList.size(); i++) {
            Object object = resultList.get(i);
            if (object instanceof Exception) {
                LogWrapper.getInstance().fine("Exception in GC " + ((Throwable) object).getMessage(), ((Throwable) object));
                continue;
            } else if (object instanceof Throwable) {
                LogWrapper.getInstance().fine("Exception in GC " + ((Throwable) object).getMessage(), ((Throwable) object));
                continue;
            }
            if (object != null) {
                if (object instanceof String) {
                    // unexpected exception string - cache may be closed or something
                    return ResultBuilder.createUserErrorResult((String) object);
                } else {
                    Map<String, String> resultMap = (Map<String, String>) object;
                    toTabularResultData(resultTable, (String) resultMap.get("MemberId"), (String) resultMap.get("HeapSizeBeforeGC"), (String) resultMap.get("HeapSizeAfterGC"), (String) resultMap.get("TimeSpentInGC"));
                }
            } else {
                LogWrapper.getInstance().fine("ResultMap was null ");
            }
        }
    } catch (Exception e) {
        String stack = CliUtil.stackTraceAsString(e);
        LogWrapper.getInstance().info("GC exception is " + stack);
        return ResultBuilder.createGemFireErrorResult(e.getMessage() + ": " + stack);
    }
    return ResultBuilder.buildResult(resultTable);
}
Also used : ChangeLogLevelFunction(org.apache.geode.management.internal.cli.functions.ChangeLogLevelFunction) Function(org.apache.geode.cache.execute.Function) NetstatFunction(org.apache.geode.management.internal.cli.functions.NetstatFunction) ShutDownFunction(org.apache.geode.management.internal.cli.functions.ShutDownFunction) GarbageCollectionFunction(org.apache.geode.management.internal.cli.functions.GarbageCollectionFunction) GetStackTracesFunction(org.apache.geode.management.internal.cli.functions.GetStackTracesFunction) Map(java.util.Map) HashMap(java.util.HashMap) ConverterHint(org.apache.geode.management.cli.ConverterHint) TimeoutException(java.util.concurrent.TimeoutException) CommandResultException(org.apache.geode.management.internal.cli.result.CommandResultException) ResultDataException(org.apache.geode.management.internal.cli.result.ResultDataException) FunctionException(org.apache.geode.cache.execute.FunctionException) DataFormatException(java.util.zip.DataFormatException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException)
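
CliUtil.executeFunction is an internal management helper; the same fan-out over a set of members can be expressed with the public FunctionService API. The sketch below is a minimal, hypothetical equivalent of only the execution step (the class and method names are illustrative, and it assumes the default result collector returns a List with one entry per member, as the surrounding code expects):

import java.util.List;
import java.util.Set;

import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;

public class MemberFunctionSketch {

    // Hypothetical helper: runs the given function on every member in dsMembers and
    // returns one result object per member (a Map<String, String> on success, or a
    // Throwable if that member failed, matching the handling in the example above).
    @SuppressWarnings("unchecked")
    static List<Object> runOnMembers(Function gcFunction, Set<DistributedMember> dsMembers) {
        Execution execution = FunctionService.onMembers(dsMembers);
        ResultCollector<?, ?> collector = execution.execute(gcFunction);
        return (List<Object>) collector.getResult();
    }
}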

Example 87 with Function

use of org.apache.geode.cache.execute.Function in project geode by apache.

the class ClientFunctionTimeoutRegressionTest method executeFunction.

private void executeFunction(String mode, Integer timeout) {
    Function function = new TestFunction(mode + timeout);
    FunctionService.registerFunction(function);
    Execution dataSet;
    if ("region".equalsIgnoreCase(mode)) {
        dataSet = FunctionService.onRegion(clientCache.getRegion(REGION_NAME)).setArguments(timeout);
    } else if ("server".equalsIgnoreCase(mode)) {
        dataSet = FunctionService.onServer(clientCache.getDefaultPool()).setArguments(timeout);
    } else {
        dataSet = FunctionService.onServers(clientCache).setArguments(timeout);
    }
    ResultCollector rs = dataSet.execute(function);
    assertThat((Boolean) ((ArrayList) rs.getResult()).get(0)).as("Server did not read client_function_timeout from client.").isTrue();
}
Also used : Function(org.apache.geode.cache.execute.Function) Execution(org.apache.geode.cache.execute.Execution) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector)
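
TestFunction here is one of Geode's internal test fixtures. A minimal user-defined function with the same observable contract (read the argument passed via setArguments and reply with a single Boolean) might look roughly like the following sketch; the class name and reply logic are assumptions rather than the fixture's actual implementation:

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;

// Sketch of a server-side function that echoes whether it received an argument.
public class TimeoutEchoFunction implements Function {

    @Override
    public void execute(FunctionContext context) {
        // The client supplied the timeout via setArguments(timeout).
        Object timeout = context.getArguments();
        // lastResult() sends the value and marks the result stream as complete.
        context.getResultSender().lastResult(timeout != null);
    }

    @Override
    public String getId() {
        return getClass().getName();
    }

    @Override
    public boolean hasResult() {
        return true;
    }

    @Override
    public boolean optimizeForWrite() {
        return false;
    }

    @Override
    public boolean isHA() {
        return false;
    }
}

Registering it on the server with FunctionService.registerFunction(new TimeoutEchoFunction()) makes it callable from the client by instance or by id, as in the example.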

Example 88 with Function

use of org.apache.geode.cache.execute.Function in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteSingleKeyExecution_byName_FunctionInvocationTargetException.

/**
   * Test remote execution by a pure accessor that does not have the function factory present.
   * The function throws a FunctionInvocationTargetException. Because this is an HA case, the
   * system should retry the function execution; after the 5th attempt the function sends a
   * Boolean as its last result.
   */
@Test
public void testRemoteSingleKeyExecution_byName_FunctionInvocationTargetException() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(2);
    final VM datastore = host.getVM(3);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    datastore.invoke(new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_REEXECUTE_EXCEPTION);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    });
    accessor.invoke(new SerializableCallable("Create data, invoke exectuable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final String testKey = "execKey";
            final Set testKeysSet = new HashSet();
            testKeysSet.add(testKey);
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_REEXECUTE_EXCEPTION);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            pr.put(testKey, new Integer(1));
            try {
                ResultCollector rs1 = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function.getId());
                List list = (ArrayList) rs1.getResult();
                assertEquals(list.get(0), 5);
            } catch (Throwable e) {
                e.printStackTrace();
                Assert.fail("This is not expected Exception", e);
            }
            return Boolean.TRUE;
        }
    });
}
Also used : LocalDataSet(org.apache.geode.internal.cache.LocalDataSet) Set(java.util.Set) HashSet(java.util.HashSet) TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
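
TEST_FUNCTION_REEXECUTE_EXCEPTION exercises the HA retry contract: a function that throws FunctionInvocationTargetException signals a retriable failure, and an HA function with a result is re-executed. A rough, hypothetical stand-in for such a fixture (the static attempt counter and the final result value are assumptions chosen to match the assertion above) could look like this:

import java.util.concurrent.atomic.AtomicInteger;

import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;

// Sketch: fails the first four invocations so the function service retries,
// then sends the attempt count (5) as the last result.
public class RetryUntilFifthAttemptFunction implements Function {

    private static final AtomicInteger ATTEMPTS = new AtomicInteger();

    @Override
    public void execute(FunctionContext context) {
        int attempt = ATTEMPTS.incrementAndGet();
        if (attempt < 5) {
            // Retriable failure: with isHA() == true the execution is re-attempted.
            throw new FunctionInvocationTargetException("simulated target loss, attempt " + attempt);
        }
        context.getResultSender().lastResult(attempt);
    }

    @Override
    public String getId() {
        return "RetryUntilFifthAttemptFunction";
    }

    @Override
    public boolean hasResult() {
        return true;
    }

    @Override
    public boolean optimizeForWrite() {
        return false;
    }

    @Override
    public boolean isHA() {
        return true;
    }
}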

Example 89 with Function

use of org.apache.geode.cache.execute.Function in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteMultiKeyExecution_BucketMoved.

@Test
public void testRemoteMultiKeyExecution_BucketMoved() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(3);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            AttributesFactory factory = new AttributesFactory();
            PartitionAttributesFactory paf = new PartitionAttributesFactory();
            paf.setTotalNumBuckets(113);
            paf.setLocalMaxMemory(0);
            paf.setRedundantCopies(1);
            paf.setStartupRecoveryDelay(0);
            PartitionAttributes partitionAttributes = paf.create();
            factory.setDataPolicy(DataPolicy.PARTITION);
            factory.setPartitionAttributes(partitionAttributes);
            RegionAttributes attrs = factory.create();
            getCache().createRegion(rName, attrs);
            return Boolean.TRUE;
        }
    });
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            AttributesFactory factory = new AttributesFactory();
            PartitionAttributesFactory paf = new PartitionAttributesFactory();
            paf.setTotalNumBuckets(113);
            paf.setLocalMaxMemory(40);
            paf.setRedundantCopies(1);
            paf.setStartupRecoveryDelay(0);
            PartitionAttributes partitionAttributes = paf.create();
            factory.setDataPolicy(DataPolicy.PARTITION);
            factory.setPartitionAttributes(partitionAttributes);
            RegionAttributes attrs = factory.create();
            getCache().createRegion(rName, attrs);
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_LASTRESULT);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    SerializableCallable put = new SerializableCallable("put in PR") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            for (int i = 0; i < 113; i++) {
                pr.put(i, "execKey-" + i);
            }
            return Boolean.TRUE;
        }
    };
    accessor.invoke(put);
    datastore2.invoke(dataStoreCreate);
    Object result = accessor.invoke(new SerializableCallable("invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TestFunction.TEST_FUNCTION_LASTRESULT);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            ResultCollector rc2 = dataSet.setArguments(Boolean.TRUE).execute(function.getId());
            List l = ((List) rc2.getResult());
            return l;
        }
    });
    List l = (List) result;
    assertEquals(2, l.size());
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) PartitionAttributes(org.apache.geode.cache.PartitionAttributes) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) AttributesFactory(org.apache.geode.cache.AttributesFactory) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) List(java.util.List) ArrayList(java.util.ArrayList) ResultCollector(org.apache.geode.cache.execute.ResultCollector) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
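
The invocation inside the final callable (a data-aware execution over the whole region, collected into a List with one last result per data store) is the general onRegion pattern. A stripped-down, hypothetical helper showing just that call shape, assuming the function has already been registered under functionId:

import java.util.List;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class RegionExecutionSketch {

    // Hypothetical helper: executes a registered function over the whole region and
    // blocks in getResult() until every participating node has sent its last result.
    @SuppressWarnings("unchecked")
    static List<Object> executeOnRegion(Region<?, ?> region, String functionId, Object args) {
        Execution execution = FunctionService.onRegion(region).setArguments(args);
        ResultCollector<?, ?> collector = execution.execute(functionId);
        return (List<Object>) collector.getResult();
    }
}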

Example 90 with Function

use of org.apache.geode.cache.execute.Function in project geode by apache.

the class PRFunctionExecutionDUnitTest method testRemoteMultiKeyExecution_byInstance.

/**
   * Test multi-key remote execution by a pure accessor which doesn't have the function factory
   * present.
   */
@Test
public void testRemoteMultiKeyExecution_byInstance() throws Exception {
    final String rName = getUniqueName();
    Host host = Host.getHost(0);
    final VM accessor = host.getVM(3);
    final VM datastore0 = host.getVM(0);
    final VM datastore1 = host.getVM(1);
    final VM datastore2 = host.getVM(2);
    getCache();
    accessor.invoke(new SerializableCallable("Create PR") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 0);
            getCache().createRegion(rName, ra);
            return Boolean.TRUE;
        }
    });
    SerializableCallable dataStoreCreate = new SerializableCallable("Create PR with Function Factory") {

        public Object call() throws Exception {
            RegionAttributes ra = PartitionedRegionTestHelper.createRegionAttrsForPR(0, 10);
            AttributesFactory raf = new AttributesFactory(ra);
            PartitionAttributesImpl pa = new PartitionAttributesImpl();
            pa.setAll(ra.getPartitionAttributes());
            raf.setPartitionAttributes(pa);
            getCache().createRegion(rName, raf.create());
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            return Boolean.TRUE;
        }
    };
    datastore0.invoke(dataStoreCreate);
    datastore1.invoke(dataStoreCreate);
    datastore2.invoke(dataStoreCreate);
    Object o = accessor.invoke(new SerializableCallable("Create data, invoke executable") {

        public Object call() throws Exception {
            PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(rName);
            final HashSet testKeysSet = new HashSet();
            for (int i = (pr.getTotalNumberOfBuckets() * 2); i > 0; i--) {
                testKeysSet.add("execKey-" + i);
            }
            DistributedSystem.setThreadsSocketPolicy(false);
            Function function = new TestFunction(true, TEST_FUNCTION2);
            FunctionService.registerFunction(function);
            Execution dataSet = FunctionService.onRegion(pr);
            try {
                dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function);
            } catch (Exception expected) {
                // No data should cause exec to throw
                LogWriterUtils.getLogWriter().warning("Exception Occurred : " + expected.getMessage());
            // boolean expectedStr = expected.getMessage().startsWith("No target
            // node was found for routingKey");
            // assertTrue("Unexpected exception: " + expected, expectedStr);
            }
            int j = 0;
            HashSet origVals = new HashSet();
            for (Iterator i = testKeysSet.iterator(); i.hasNext(); ) {
                Integer val = new Integer(j++);
                origVals.add(val);
                pr.put(i.next(), val);
            }
            // DefaultResultCollector rc1 = new DefaultResultCollector();
            ResultCollector rc1 = dataSet.withFilter(testKeysSet).setArguments(Boolean.TRUE).execute(function);
            List l = ((List) rc1.getResult());
            assertEquals(3, l.size());
            for (Iterator i = l.iterator(); i.hasNext(); ) {
                assertEquals(Boolean.TRUE, i.next());
            }
            // DefaultResultCollector rc2 = new DefaultResultCollector();
            ResultCollector rc2 = dataSet.withFilter(testKeysSet).setArguments(testKeysSet).execute(function);
            List l2 = ((List) rc2.getResult());
            // assertIndexDetailsEquals(pr.getTotalNumberOfBuckets(), l2.size());
            assertEquals(3, l2.size());
            // assertIndexDetailsEquals(pr.getTotalNumberOfBuckets(), l.size());
            HashSet foundVals = new HashSet();
            for (Iterator i = l2.iterator(); i.hasNext(); ) {
                ArrayList subL = (ArrayList) i.next();
                assertTrue(subL.size() > 0);
                for (Iterator subI = subL.iterator(); subI.hasNext(); ) {
                    assertTrue(foundVals.add(subI.next()));
                }
            }
            assertEquals(origVals, foundVals);
            return Boolean.TRUE;
        }
    });
    assertEquals(Boolean.TRUE, o);
}
Also used : TestFunction(org.apache.geode.internal.cache.functions.TestFunction) RegionAttributes(org.apache.geode.cache.RegionAttributes) ArrayList(java.util.ArrayList) Host(org.apache.geode.test.dunit.Host) IgnoredException(org.apache.geode.test.dunit.IgnoredException) FunctionException(org.apache.geode.cache.execute.FunctionException) Function(org.apache.geode.cache.execute.Function) AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) PartitionAttributesImpl(org.apache.geode.internal.cache.PartitionAttributesImpl) Execution(org.apache.geode.cache.execute.Execution) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Iterator(java.util.Iterator) List(java.util.List) ResultCollector(org.apache.geode.cache.execute.ResultCollector) HashSet(java.util.HashSet) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest) Test(org.junit.Test)
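
The withFilter call in this example routes the execution only to the members that currently host the filter keys, which is why the first collected List here has three entries, one per data store, rather than one per key. A condensed, hypothetical sketch of that filtered call shape (the helper name and Boolean argument are assumptions mirroring the test):

import java.util.List;
import java.util.Set;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;

public class FilteredExecutionSketch {

    // Hypothetical helper: runs the function only on the members that own the filter keys.
    @SuppressWarnings("unchecked")
    static List<Object> executeForKeys(Region<String, ?> region, Function function, Set<String> keys) {
        Execution execution = FunctionService.onRegion(region)
            .withFilter(keys)               // routing filter: only nodes hosting these keys execute
            .setArguments(Boolean.TRUE);    // same argument shape as the test above
        ResultCollector<?, ?> collector = execution.execute(function);
        return (List<Object>) collector.getResult();
    }
}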

Aggregations

Function (org.apache.geode.cache.execute.Function) 261
TestFunction (org.apache.geode.internal.cache.functions.TestFunction) 204
Test (org.junit.Test) 156
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest) 148
HashSet (java.util.HashSet) 124
FunctionException (org.apache.geode.cache.execute.FunctionException) 122
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion) 122
Execution (org.apache.geode.cache.execute.Execution) 121
ArrayList (java.util.ArrayList) 110
ResultCollector (org.apache.geode.cache.execute.ResultCollector) 110
List (java.util.List) 86
Region (org.apache.geode.cache.Region) 83
IgnoredException (org.apache.geode.test.dunit.IgnoredException) 75
FlakyTest (org.apache.geode.test.junit.categories.FlakyTest) 74
Iterator (java.util.Iterator) 68
ClientServerTest (org.apache.geode.test.junit.categories.ClientServerTest) 67
SerializableCallable (org.apache.geode.test.dunit.SerializableCallable) 63
VM (org.apache.geode.test.dunit.VM) 62
Host (org.apache.geode.test.dunit.Host) 61
AttributesFactory (org.apache.geode.cache.AttributesFactory) 56