
Example 21 with CacheSerializableRunnable

use of org.apache.geode.cache30.CacheSerializableRunnable in project geode by apache.

the class PRQueryRemoteNodeExceptionDUnitTest method testForceReattemptExceptionFromLocal.

@Test
public void testForceReattemptExceptionFromLocal() throws Exception {
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    setCacheInVMs(vm0, vm1, vm2);
    List vmList = new LinkedList();
    vmList.add(vm1);
    vmList.add(vm0);
    vmList.add(vm2);
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1 /* redundancy */, numOfBuckets));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1 /* redundancy */, numOfBuckets));
    vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1 /* redundancy */, numOfBuckets));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
    // Creating a local region on one of the JVMs
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
    // Generating portfolio object array to be populated across the PR's & Local
    // Regions
    final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
    // Putting the data into the accessor node
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
    // Putting the same data in the local region created
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node  VM0 for result Set Comparison");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node  VM0 for result Set Comparison");
    // Insert the test hooks on the local and remote nodes.
    // Each hook destroys one of its local primary buckets partway through query execution, which
    // triggers a ForceReattemptException; the query is expected to recover by retrying and succeed.
    vm1.invoke(new CacheSerializableRunnable(name) {

        @Override
        public void run2() throws CacheException {
            class MyQueryObserver extends IndexTrackingQueryObserver {

                private int noOfAccess = 0;

                @Override
                public void startQuery(Query query) {
                    Object region = ((DefaultQuery) query).getRegionsInQuery(null).iterator().next();
                    LogWriterUtils.getLogWriter().info("Region type on VM1:" + region);
                    if (noOfAccess == 1) {
                        PartitionedRegion pr = (PartitionedRegion) PRQHelp.getCache().getRegion(name);
                        List buks = pr.getLocalPrimaryBucketsListTestOnly();
                        LogWriterUtils.getLogWriter().info("Available buckets:" + buks);
                        int bukId = ((Integer) (buks.get(0))).intValue();
                        LogWriterUtils.getLogWriter().info("Destroying bucket id:" + bukId);
                        pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
                    }
                    ++noOfAccess;
                }
            }
            QueryObserverHolder.setInstance(new MyQueryObserver());
        }
    });
    vm0.invoke(new CacheSerializableRunnable(name) {

        @Override
        public void run2() throws CacheException {
            boolean gotException = false;
            Cache cache = PRQHelp.getCache();
            class MyQueryObserver extends QueryObserverAdapter {

                private int noOfAccess = 0;

                @Override
                public void startQuery(Query query) {
                    Object region = ((DefaultQuery) query).getRegionsInQuery(null).iterator().next();
                    LogWriterUtils.getLogWriter().info("Region type on VM0:" + region);
                    if (noOfAccess == 2) {
                        PartitionedRegion pr = (PartitionedRegion) PRQHelp.getCache().getRegion(name);
                        List buks = pr.getLocalPrimaryBucketsListTestOnly();
                        LogWriterUtils.getLogWriter().info("Available buckets:" + buks);
                        int bukId = ((Integer) (buks.get(0))).intValue();
                        LogWriterUtils.getLogWriter().info("Destroying bucket id:" + bukId);
                        pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
                    }
                    ++noOfAccess;
                }
            }
            QueryObserverHolder.setInstance(new MyQueryObserver());
            final DefaultQuery query = (DefaultQuery) cache.getQueryService().newQuery("Select * from /" + name);
            try {
                query.execute();
                LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Query executed successfully with ForceReattemptException on local and remote both.");
            } catch (Exception ex) {
                gotException = true;
                Assert.fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test received Exception", ex);
            }
        }
    });
    LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
Also used : DefaultQuery(org.apache.geode.cache.query.internal.DefaultQuery) Query(org.apache.geode.cache.query.Query) DefaultQuery(org.apache.geode.cache.query.internal.DefaultQuery) CacheException(org.apache.geode.cache.CacheException) Host(org.apache.geode.test.dunit.Host) LinkedList(java.util.LinkedList) CacheClosedException(org.apache.geode.cache.CacheClosedException) CacheException(org.apache.geode.cache.CacheException) QueryInvocationTargetException(org.apache.geode.cache.query.QueryInvocationTargetException) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) VM(org.apache.geode.test.dunit.VM) LinkedList(java.util.LinkedList) List(java.util.List) PortfolioData(org.apache.geode.cache.query.data.PortfolioData) Cache(org.apache.geode.cache.Cache) Test(org.junit.Test) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest)
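
A practical note on the hook used above: QueryObserverHolder is JVM-global state, so once a bucket-destroying observer is installed, every later query in that VM will hit it too. The quoted test does not show a teardown step; a minimal cleanup sketch in the same dunit style, assuming QueryObserverAdapter's callbacks are no-ops, could look like this:

// Hypothetical teardown step (not part of the quoted test): restore a pass-through observer so
// that queries run later in the same VM are no longer intercepted.
vm1.invoke(new CacheSerializableRunnable("resetQueryObserver") {

    @Override
    public void run2() throws CacheException {
        QueryObserverHolder.setInstance(new QueryObserverAdapter());
    }
});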

Example 22 with CacheSerializableRunnable

use of org.apache.geode.cache30.CacheSerializableRunnable in project geode by apache.

the class PRQueryRemoteNodeExceptionDUnitTest method testPRWithLocalAndRemoteException.

/**
   * This test <br>
   * 1. Creates PR regions with scope = DACK on 2 data-store VMs <br>
   * 2. Creates a local region on one of the VMs <br>
   * 3. Puts the same data into both the PR region and the local region <br>
   * 4. Queries the data in both the local region and the PR <br>
   * 5. Installs QueryObservers on both the local and the remote data-store node that throw test
   * exceptions from startQuery() <br>
   * 6. Re-executes the query on one of the data-store nodes <br>
   * 7. Verifies that the exception thrown to the caller comes from the local node, not the remote
   * node <br>
   */
@Test
public void testPRWithLocalAndRemoteException() throws Exception {
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    setCacheInVMs(vm0, vm1);
    List vmList = new LinkedList();
    vmList.add(vm1);
    vmList.add(vm0);
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
    // Creating a local region on one of the JVMs
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
    // Generating portfolio object array to be populated across the PR's & Local
    // Regions
    final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
    // Putting the data into the accessor node
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
    // Putting the same data in the local region created
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node  VM0 for result Set Comparison");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node  VM0 for result Set Comparison");
    // Execute the query once up front to make sure all the buckets are created (bucket creation
    // is lazy).
    LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying on VM0 First time");
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(name, localName));
    // Insert the test hooks on the local and remote nodes. Each hook throws a RuntimeException
    // from startQuery(); the test then verifies that the exception surfaced to the caller is the
    // one from the local node.
    vm1.invoke(new CacheSerializableRunnable(name) {

        @Override
        public void run2() throws CacheException {
            class MyQueryObserver extends IndexTrackingQueryObserver {

                @Override
                public void startQuery(Query query) {
                    throw new RuntimeException("For testing purpose only from remote node");
                }
            }
            QueryObserverHolder.setInstance(new MyQueryObserver());
        }
    });
    vm0.invoke(new CacheSerializableRunnable(name) {

        @Override
        public void run2() throws CacheException {
            boolean gotException = false;
            Cache cache = PRQHelp.getCache();
            class MyQueryObserver extends QueryObserverAdapter {

                @Override
                public void startQuery(Query query) {
                    throw new RuntimeException("For testing purpose only from local node");
                }
            }
            QueryObserverHolder.setInstance(new MyQueryObserver());
            final DefaultQuery query = (DefaultQuery) cache.getQueryService().newQuery("Select * from /" + name);
            try {
                query.execute();
            } catch (Exception ex) {
                gotException = true;
                if (ex.getMessage().contains("local node")) {
                    // ex.printStackTrace();
                    LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
                } else {
                    Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
                }
            }
            if (!gotException) {
                fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test did not receive Exception as expected from local as well as remote node");
            }
        }
    });
    LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
Also used : DefaultQuery(org.apache.geode.cache.query.internal.DefaultQuery) Query(org.apache.geode.cache.query.Query) DefaultQuery(org.apache.geode.cache.query.internal.DefaultQuery) CacheException(org.apache.geode.cache.CacheException) Host(org.apache.geode.test.dunit.Host) LinkedList(java.util.LinkedList) CacheClosedException(org.apache.geode.cache.CacheClosedException) CacheException(org.apache.geode.cache.CacheException) QueryInvocationTargetException(org.apache.geode.cache.query.QueryInvocationTargetException) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) VM(org.apache.geode.test.dunit.VM) LinkedList(java.util.LinkedList) List(java.util.List) PortfolioData(org.apache.geode.cache.query.data.PortfolioData) Cache(org.apache.geode.cache.Cache) Test(org.junit.Test) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest)
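
The mechanism the test relies on is that whatever observer is registered with QueryObserverHolder gets startQuery() invoked before the engine runs the query on that node, so an exception thrown there surfaces to the caller of execute(). A minimal single-JVM sketch of the same failure-injection pattern outside the dunit harness (region name, property settings, and messages here are illustrative, and QueryObserverHolder/QueryObserverAdapter are internal Geode classes):

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.internal.QueryObserverAdapter;
import org.apache.geode.cache.query.internal.QueryObserverHolder;

public class QueryObserverFailureSketch {

    public static void main(String[] args) throws Exception {
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        Region<Integer, String> region =
            cache.<Integer, String>createRegionFactory(RegionShortcut.PARTITION).create("portfolios");
        region.put(1, "one");

        // Install a hook that fails every query started in this JVM.
        QueryObserverHolder.setInstance(new QueryObserverAdapter() {
            @Override
            public void startQuery(Query query) {
                throw new RuntimeException("For testing purpose only from local node");
            }
        });

        try {
            cache.getQueryService().newQuery("select * from /portfolios").execute();
        } catch (Exception e) {
            // The RuntimeException thrown by the observer reaches the caller of execute().
            System.out.println("Got expected failure: " + e.getMessage());
        } finally {
            cache.close();
        }
    }
}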

Example 23 with CacheSerializableRunnable

use of org.apache.geode.cache30.CacheSerializableRunnable in project geode by apache.

the class PRQueryDUnitHelper method getCacheSerializableRunnableForLocalRegionCreation.

public CacheSerializableRunnable getCacheSerializableRunnableForLocalRegionCreation(final String regionName, final Class constraint) {
    SerializableRunnable createPrRegion;
    createPrRegion = new CacheSerializableRunnable(regionName) {

        @Override
        public void run2() throws CacheException {
            Cache cache = getCache();
            Region localRegion = null;
            try {
                AttributesFactory attr = new AttributesFactory();
                attr.setValueConstraint(constraint);
                attr.setScope(Scope.LOCAL);
                localRegion = cache.createRegion(regionName, attr.create());
            } catch (IllegalStateException ex) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().warning("PRQueryDUnitHelper#getCacheSerializableRunnableForLocalRegionCreation: Creation caught IllegalStateException", ex);
            }
            assertNotNull("PRQueryDUnitHelper#getCacheSerializableRunnableForLocalRegionCreation: Local Region " + regionName + " not in cache", cache.getRegion(regionName));
            assertNotNull("PRQueryDUnitHelper#getCacheSerializableRunnableForLocalRegionCreation: Local Region ref null", localRegion);
            assertTrue("PRQueryDUnitHelper#getCacheSerializableRunnableForLocalRegionCreation: Local Region ref claims to be destroyed", !localRegion.isDestroyed());
        }
    };
    return (CacheSerializableRunnable) createPrRegion;
}
Also used : AttributesFactory(org.apache.geode.cache.AttributesFactory) PartitionAttributesFactory(org.apache.geode.cache.PartitionAttributesFactory) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) CacheException(org.apache.geode.cache.CacheException) SerializableRunnable(org.apache.geode.test.dunit.SerializableRunnable) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) LocalRegion(org.apache.geode.internal.cache.LocalRegion) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) Region(org.apache.geode.cache.Region) Cache(org.apache.geode.cache.Cache)
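
The helper above uses the older AttributesFactory API with Scope.LOCAL, which confines the region to the creating VM; that is what lets Examples 21 and 22 use it as an independent baseline for comparing PR query results. For reference, a roughly equivalent definition with the newer RegionFactory API might look like the following sketch (the region name is illustrative):

// Sketch of an equivalent local region using RegionShortcut.LOCAL instead of AttributesFactory.
Cache cache = new CacheFactory().set("mcast-port", "0").create();
Region<Object, PortfolioData> localRegion = cache
    .<Object, PortfolioData>createRegionFactory(RegionShortcut.LOCAL)
    .setValueConstraint(PortfolioData.class)
    .create("localPortfolios");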

Example 24 with CacheSerializableRunnable

use of org.apache.geode.cache30.CacheSerializableRunnable in project geode by apache.

the class PRQueryDUnitHelper method getCacheSerializableRunnableForPROrderByQueryWithLimit.

public CacheSerializableRunnable getCacheSerializableRunnableForPROrderByQueryWithLimit(final String regionName, final String localRegion) {
    SerializableRunnable PrRegion = new CacheSerializableRunnable("PRQuery") {

        public void run2() throws CacheException {
            Cache cache = getCache();
            // Querying the localRegion and the PR region
            String[] queries = new String[] {
                "status as st from /REGION_NAME order by status",
                "p.status from /REGION_NAME p order by p.status",
                "p.position1.secId, p.ID from /REGION_NAME p order by p.position1.secId, p.ID desc",
                "key from /REGION_NAME.keys key order by key.status, key.ID",
                "key.ID from /REGION_NAME.keys key order by key.ID",
                "key.ID, key.status from /REGION_NAME.keys key order by key.status, key.ID asc",
                "key.ID, key.status from /REGION_NAME.keys key order by key.status desc, key.ID",
                "p.status, p.ID from /REGION_NAME p order by p.status asc, p.ID",
                "p.ID from /REGION_NAME p, p.positions.values order by p.ID",
                "* from /REGION_NAME p, p.positions.values val order by p.ID, val.secId",
                "p.iD, p.status from /REGION_NAME p order by p.iD",
                "iD, status from /REGION_NAME order by iD",
                "* from /REGION_NAME p order by p.getID()",
                "* from /REGION_NAME p order by p.getP1().secId, p.ID desc, p.ID",
                " p.position1.secId , p.ID as st from /REGION_NAME p order by p.position1.secId, p.ID",
                "e.key.ID, e.value.status from /REGION_NAME.entrySet e order by e.key.ID, e.value.status desc",
                "e.key from /REGION_NAME.entrySet e order by e.key.ID, e.key.pkid desc",
                "p, pos from /REGION_NAME p, p.positions.values pos order by p.ID, pos.secId desc",
                "p, pos from /REGION_NAME p, p.positions.values pos order by pos.secId, p.ID",
                "status , ID as ied from /REGION_NAME where ID > 0 order by status, ID desc",
                "p.status as st, p.ID as id from /REGION_NAME p where ID > 0 and status = 'inactive' order by p.status, p.ID desc",
                "p.position1.secId as st, p.ID as ied from /REGION_NAME p where p.ID > 0 and p.position1.secId != 'IBM' order by p.position1.secId, p.ID",
                " key.status as st, key.ID from /REGION_NAME.keys key where key.ID > 5 order by key.status, key.ID desc",
                " key.ID, key.status as st from /REGION_NAME.keys key where key.status = 'inactive' order by key.status desc, key.ID" };
            Object[][] r = new Object[queries.length][2];
            Region local = cache.getRegion(localRegion);
            Region region = cache.getRegion(regionName);
            assertNotNull(region);
            final String[] expectedExceptions = new String[] { RegionDestroyedException.class.getName(), ReplyException.class.getName(), CacheClosedException.class.getName(), ForceReattemptException.class.getName(), QueryInvocationTargetException.class.getName() };
            for (final String expectedException : expectedExceptions) {
                getCache().getLogger().info("<ExpectedException action=add>" + expectedException + "</ExpectedException>");
            }
            String distinct = "<TRACE>SELECT DISTINCT ";
            QueryService qs = getCache().getQueryService();
            Object[] params;
            try {
                for (int l = 1; l <= 3; l++) {
                    String[] rq = new String[queries.length];
                    for (int j = 0; j < queries.length; j++) {
                        String qStr = null;
                        synchronized (region) {
                            // Execute on local region.
                            qStr = (distinct + queries[j].replace("REGION_NAME", localRegion));
                            qStr += (" LIMIT " + (l * l));
                            rq[j] = qStr;
                            SelectResults sr = (SelectResults) qs.newQuery(qStr).execute();
                            r[j][0] = sr;
                            if (sr.asList().size() > l * l) {
                                fail("The resultset size exceeds limit size. Limit size=" + l * l + ", result size =" + sr.asList().size());
                            }
                            // Execute on remote region.
                            qStr = (distinct + queries[j].replace("REGION_NAME", regionName));
                            qStr += (" LIMIT " + (l * l));
                            rq[j] = qStr;
                            SelectResults srr = (SelectResults) qs.newQuery(qStr).execute();
                            r[j][1] = srr;
                            if (srr.size() > l * l) {
                                fail("The resultset size exceeds limit size. Limit size=" + l * l + ", result size =" + srr.asList().size());
                            }
                        // assertIndexDetailsEquals("The resultset size is not same as limit size.", l*l,
                        // srr.asList().size());
                        // getCache().getLogger().info("Finished executing PR query: " + qStr);
                        }
                    }
                    StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
                    ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, true, rq);
                }
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
            } catch (QueryInvocationTargetException e) {
                // A QueryInvocationTargetException is not acceptable here, so fail the test.
                throw new TestException("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception", e);
            } catch (QueryException e) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().error("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying" + e, e);
                throw new TestException("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception", e);
            } catch (RegionDestroyedException rde) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ", rde);
            } catch (CancelException cce) {
                org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ", cce);
            } finally {
                for (final String expectedException : expectedExceptions) {
                    getCache().getLogger().info("<ExpectedException action=remove>" + expectedException + "</ExpectedException>");
                }
            }
        }
    };
    return (CacheSerializableRunnable) PrRegion;
}
Also used : StructSetOrResultsSet(org.apache.geode.cache.query.functional.StructSetOrResultsSet) TestException(util.TestException) RegionDestroyedException(org.apache.geode.cache.RegionDestroyedException) SerializableRunnable(org.apache.geode.test.dunit.SerializableRunnable) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) QueryInvocationTargetException(org.apache.geode.cache.query.QueryInvocationTargetException) QueryException(org.apache.geode.cache.query.QueryException) SelectResults(org.apache.geode.cache.query.SelectResults) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) QueryService(org.apache.geode.cache.query.QueryService) LocalRegion(org.apache.geode.internal.cache.LocalRegion) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) Region(org.apache.geode.cache.Region) CancelException(org.apache.geode.CancelException) Cache(org.apache.geode.cache.Cache)
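
Buried in the long method above is a small string transformation: each template has its REGION_NAME placeholder replaced with the local or PR region name, gets a <TRACE>SELECT DISTINCT prefix, and is capped with a LIMIT of l*l for l = 1..3. A stripped-down sketch of just that step (the helper method name here is made up for illustration):

// Builds the final OQL string executed for one query template and one limit step l.
static String buildLimitedQuery(String template, String regionName, int l) {
    // e.g. "status as st from /REGION_NAME order by status" with regionName "portfolios" and l = 2
    // becomes "<TRACE>SELECT DISTINCT status as st from /portfolios order by status LIMIT 4".
    return "<TRACE>SELECT DISTINCT " + template.replace("REGION_NAME", regionName) + " LIMIT " + (l * l);
}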

Example 25 with CacheSerializableRunnable

use of org.apache.geode.cache30.CacheSerializableRunnable in project geode by apache.

the class PRQueryDUnitHelper method getCacheSerializableRunnableForPRDuplicatePuts.

/**
   * This function puts portfolio objects into the created region (PR or local). The values
   * portfolio[from..to-1] are written under keys starting at {@code to}, so running it after a
   * normal put pass re-inserts the same values under new, non-overlapping keys.
   *
   * @param regionName name of the region to populate
   * @param portfolio array of portfolio objects to insert
   * @param from index of the first portfolio object to insert (inclusive)
   * @param to index to stop at (exclusive); also the first key used for the duplicate entries
   * @return cacheSerializable object that performs the puts
   */
public CacheSerializableRunnable getCacheSerializableRunnableForPRDuplicatePuts(final String regionName, final Object[] portfolio, final int from, final int to) {
    SerializableRunnable prPuts = new CacheSerializableRunnable("PRPuts") {

        @Override
        public void run2() throws CacheException {
            Cache cache = getCache();
            Region region = cache.getRegion(regionName);
            for (int j = from, i = to; j < to; j++, i++) {
                region.put(new Integer(i), portfolio[j]);
            }
        }
    };
    return (CacheSerializableRunnable) prPuts;
}
Also used : CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) SerializableRunnable(org.apache.geode.test.dunit.SerializableRunnable) CacheSerializableRunnable(org.apache.geode.cache30.CacheSerializableRunnable) LocalRegion(org.apache.geode.internal.cache.LocalRegion) PartitionedRegion(org.apache.geode.internal.cache.PartitionedRegion) Region(org.apache.geode.cache.Region) Cache(org.apache.geode.cache.Cache)
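
Note the key arithmetic in the loop: the values portfolio[from..to-1] are written under keys starting at to, not at from. Assuming the plain PRPuts helper used elsewhere in these tests keys entries by their index, a second pass with this runnable re-inserts the same values under fresh keys rather than overwriting anything; a hedged usage sketch following the field names from the tests above:

// First pass (as in Examples 21 and 22): populate keys cnt..cntDest-1 with portfolio values.
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
// Second pass: the same values go in again, but under keys cntDest..cntDest+(cntDest-cnt)-1,
// so the region ends up holding each value twice under different keys.
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRDuplicatePuts(name, portfolio, cnt, cntDest));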

Aggregations

CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable): 595 usages
CacheException (org.apache.geode.cache.CacheException): 415 usages
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 369 usages
Test (org.junit.Test): 369 usages
Region (org.apache.geode.cache.Region): 307 usages
VM (org.apache.geode.test.dunit.VM): 279 usages
Host (org.apache.geode.test.dunit.Host): 274 usages
SerializableRunnable (org.apache.geode.test.dunit.SerializableRunnable): 179 usages
FlakyTest (org.apache.geode.test.junit.categories.FlakyTest): 165 usages
AttributesFactory (org.apache.geode.cache.AttributesFactory): 145 usages
IOException (java.io.IOException): 135 usages
Cache (org.apache.geode.cache.Cache): 124 usages
QueryService (org.apache.geode.cache.query.QueryService): 118 usages
PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory): 107 usages
LocalRegion (org.apache.geode.internal.cache.LocalRegion): 106 usages
SelectResults (org.apache.geode.cache.query.SelectResults): 85 usages
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion): 75 usages
ClientServerTest (org.apache.geode.test.junit.categories.ClientServerTest): 71 usages
IgnoredException (org.apache.geode.test.dunit.IgnoredException): 65 usages
ClientSubscriptionTest (org.apache.geode.test.junit.categories.ClientSubscriptionTest): 61 usages