Use of org.apache.geode.cache.query.data.PortfolioData in project geode by apache.
In the class PRQueryRegionDestroyedDUnitTest, the method testPRWithRegionDestroyInOneDatastoreWithDelay.
/**
* This test <br>
* 1. Creates PR regions with scope = DACK: one accessor node and three datastores <br>
* 2. Creates a local region on one of the VMs <br>
* 3. Puts the same data into both the PR region and the local region <br>
* 4. Queries the data both in the local region and in the PR <br>
* 5. While the query runs, calls Region.close() on one randomly chosen datastore VM, after a delay <br>
* 6. Then recreates the PR on the same VM <br>
* 7. Verifies the size, type, and contents of both result sets obtained <br>
*/
@Test
public void testPRWithRegionDestroyInOneDatastoreWithDelay() throws Exception {
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying with PR Destroy Region Operation Test Started");
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
setCacheInVMs(vm0, vm1, vm2, vm3);
List vmList = new LinkedList();
vmList.add(vm1);
vmList.add(vm2);
vmList.add(vm3);
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating Accessor node on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name, redundancy, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created Accessor node on VM0");
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating PR's across all VM1 , VM2, VM3");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name, redundancy, PortfolioData.class));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name, redundancy, PortfolioData.class));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name, redundancy, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created PR on VM1 , VM2, VM3");
// creating a local region on one of the JVM's
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating Local Region on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created Local Region on VM0");
// Generating portfolio object array to be populated across the PR's & Local
// Regions
final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
Random random = new Random();
AsyncInvocation async0;
// Execute query first time. This is to make sure all the buckets are created
// (lazy bucket creation).
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying on VM0 First time");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(name, localName));
// Now execute the query. And while query execution in process destroy the region
// on one of the node.
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying on VM0 both on PR Region & local ,also Comparing the Results sets from both");
async0 = vm0.invokeAsync(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(name, localName));
Wait.pause(5);
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Calling for Region.destroyRegion() on either of the Datastores VM1 , VM2 at random and then recreating the cache, with a predefined Delay ");
int k = (random.nextInt(vmList.size()));
((VM) (vmList.get(k))).invoke(PRQHelp.getCacheSerializableRunnableForRegionClose(name, redundancy, PortfolioData.class));
ThreadUtils.join(async0, 30 * 1000);
if (async0.exceptionOccurred()) {
// for Elbe, certain exceptions when a region is destroyed are acceptable
// including ForceReattemptException (e.g. resulting from RegionDestroyed)
boolean isForceReattempt = false;
Throwable t = async0.getException();
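// Walk the cause chain: the ForceReattemptException may be wrapped inside other exceptions.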
do {
if (t instanceof ForceReattemptException) {
isForceReattempt = true;
break;
}
t = t.getCause();
} while (t != null);
if (!isForceReattempt) {
Assert.fail("Unexpected exception during query", async0.getException());
}
}
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying with PR Destroy Region Operation Test ENDED");
}
Use of org.apache.geode.cache.query.data.PortfolioData in project geode by apache.
In the class PRQueryRegionDestroyedJUnitTest, the method testQueryOnSingleDataStore.
/**
* Tests the execution of a query on a PartitionedRegion created on a single data store. <br>
* 1. Creates a PR with redundancy=0 on a single VM. <br>
* 2. Puts some test objects into the cache.<br>
* 3. Creates a thread that fires queries on the data and verifies the results.<br>
* 4. Creates another thread that calls Region#destroyRegion() on the PR.<br>
*
* @throws Exception
*/
@Test
public void testQueryOnSingleDataStore() throws Exception {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Test Started ");
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: creating PR Region ");
final Region region = PartitionedRegionTestHelper.createPartitionedRegion(regionName, localMaxMemory, redundancy);
final Region localRegion = PartitionedRegionTestHelper.createLocalRegion(localRegionName);
final StringBuffer errorBuf = new StringBuffer("");
PortfolioData[] portfolios = new PortfolioData[dataSize];
try {
for (int j = 0; j < dataSize; j++) {
portfolios[j] = new PortfolioData(j);
}
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: populating PortfolioData into the PR Datastore ");
populateData(region, portfolios);
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: populating PortfolioData into the PR Datastore ");
populateData(localRegion, portfolios);
final String[] queryString = { "ID = 0 OR ID = 1", "ID > 4 AND ID < 9", "ID = 5", "ID < 5 ", "ID <= 5" };
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Creating a Thread which will fire queries on the datastore");
Thread t1 = new Thread(new Runnable() {
public void run() {
final String expectedRegionDestroyedException = RegionDestroyedException.class.getName();
logger.info("<ExpectedException action=add>" + expectedRegionDestroyedException + "</ExpectedException>");
for (int i = 0; i < queryString.length; i++) {
try {
SelectResults resSetPR = region.query(queryString[i]);
SelectResults resSetLocal = localRegion.query(queryString[i]);
String failureString = PartitionedRegionTestHelper.compareResultSets(resSetPR, resSetLocal);
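// Pace the loop so the region destroy fired from the second thread happens while queries are still running.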
Thread.sleep(delayQuery);
if (failureString != null) {
errorBuf.append(failureString);
throw (new Exception(failureString));
}
} catch (InterruptedException ie) {
fail("interrupted");
} catch (QueryInvocationTargetException qite) {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: QueryInvocationTargetException as Expected " + qite);
} catch (RegionDestroyedException rde) {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: RegionDestroyedException as Expected " + rde);
} catch (RegionNotFoundException rnfe) {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: RegionNotFoundException as Expected " + rnfe);
} catch (Exception qe) {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Unexpected Exception " + qe);
encounteredException = true;
StringWriter sw = new StringWriter();
qe.printStackTrace(new PrintWriter(sw));
errorBuf.append(sw);
}
}
logger.info("<ExpectedException action=remove>" + expectedRegionDestroyedException + "</ExpectedException>");
}
});
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Creating a Thread which will call Region.destroyRegion() on the datastore ");
Thread t2 = new Thread(new Runnable() {
public void run() {
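// Give the query thread a head start, then destroy the PR while queries may still be executing.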
try {
Thread.sleep(2500);
} catch (InterruptedException ie) {
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore:Thread Interrupted Exceptionduring region Destroy ");
fail("interrupted");
}
region.destroyRegion();
}
});
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Initiating the Threads");
t1.start();
t2.start();
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Waiting for the Threads to join ");
ThreadUtils.join(t1, 30 * 1000);
ThreadUtils.join(t2, 30 * 1000);
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: checking for any Unexpected Exception's occurred");
assertFalse("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Exception occurred in Query-thread", encounteredException);
} catch (Exception e) {
e.printStackTrace();
fail("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Test failed because of exception " + e);
}
logger.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Test Ended");
}
Use of org.apache.geode.cache.query.data.PortfolioData in project geode by apache.
In the class PRQueryRemoteNodeExceptionDUnitTest, the method testForceReattemptExceptionFromLocal.
@Test
public void testForceReattemptExceptionFromLocal() throws Exception {
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
setCacheInVMs(vm0, vm1, vm2);
List vmList = new LinkedList();
vmList.add(vm1);
vmList.add(vm0);
vmList.add(vm2);
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1, /* redundancy */
numOfBuckets));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1, /* redundancy */
numOfBuckets));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1, /* redundancy */
numOfBuckets));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
// Generating portfolio object array to be populated across the PR's & Local
// Regions
final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
// Insert the test hooks on the local and remote nodes.
// Each hook destroys one of that node's primary buckets while the query is in flight; the
// resulting ForceReattemptException should be handled internally and the query retried.
vm1.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
class MyQueryObserver extends IndexTrackingQueryObserver {
private int noOfAccess = 0;
@Override
public void startQuery(Query query) {
Object region = ((DefaultQuery) query).getRegionsInQuery(null).iterator().next();
LogWriterUtils.getLogWriter().info("Region type on VM1:" + region);
if (noOfAccess == 1) {
PartitionedRegion pr = (PartitionedRegion) PRQHelp.getCache().getRegion(name);
List buks = pr.getLocalPrimaryBucketsListTestOnly();
LogWriterUtils.getLogWriter().info("Available buckets:" + buks);
int bukId = ((Integer) (buks.get(0))).intValue();
LogWriterUtils.getLogWriter().info("Destroying bucket id:" + bukId);
pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
}
++noOfAccess;
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
}
});
vm0.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
boolean gotException = false;
Cache cache = PRQHelp.getCache();
class MyQueryObserver extends QueryObserverAdapter {
private int noOfAccess = 0;
@Override
public void startQuery(Query query) {
Object region = ((DefaultQuery) query).getRegionsInQuery(null).iterator().next();
LogWriterUtils.getLogWriter().info("Region type on VM0:" + region);
if (noOfAccess == 2) {
PartitionedRegion pr = (PartitionedRegion) PRQHelp.getCache().getRegion(name);
List buks = pr.getLocalPrimaryBucketsListTestOnly();
LogWriterUtils.getLogWriter().info("Available buckets:" + buks);
int bukId = ((Integer) (buks.get(0))).intValue();
LogWriterUtils.getLogWriter().info("Destroying bucket id:" + bukId);
pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
}
++noOfAccess;
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
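// With both observers installed, the full-region query below should still succeed: the destroyed
// buckets are expected to trigger internal reattempts rather than a user-visible failure.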
final DefaultQuery query = (DefaultQuery) cache.getQueryService().newQuery("Select * from /" + name);
try {
query.execute();
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Query executed successfully with ForceReattemptException on local and remote both.");
} catch (Exception ex) {
gotException = true;
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test received Exception", ex);
}
}
});
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
Use of org.apache.geode.cache.query.data.PortfolioData in project geode by apache.
In the class PRQueryRemoteNodeExceptionDUnitTest, the method testPRWithLocalAndRemoteException.
/**
* This test <br>
* 1. Creates PR regions with scope = DACK on 2 data-store nodes <br>
* 2. Creates a local region on one of the VMs <br>
* 3. Puts the same data into both the PR region and the local region <br>
* 4. Queries the data both in the local region and in the PR <br>
* 5. Installs a QueryObserver on both the local and the remote data-store node that throws a test
* exception <br>
* 6. Then re-executes the query on one of the data-store nodes <br>
* 7. Verifies that the exception thrown comes from the local node, not from the remote node <br>
*/
@Test
public void testPRWithLocalAndRemoteException() throws Exception {
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
setCacheInVMs(vm0, vm1);
List vmList = new LinkedList();
vmList.add(vm1);
vmList.add(vm0);
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
// Generating portfolio object array to be populated across the PR's & Local
// Regions
final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
// Execute the query a first time. This is to make sure all the buckets are created
// (lazy bucket creation).
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying on VM0 First time");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(name, localName));
// Insert the test hooks on the local and remote nodes.
// Both hooks throw a RuntimeException from startQuery(); the test verifies that the exception
// surfaced to the caller is the one thrown on the local node.
vm1.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
class MyQueryObserver extends IndexTrackingQueryObserver {
@Override
public void startQuery(Query query) {
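// Simulate a failure on the remote data store as soon as it starts processing the query.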
throw new RuntimeException("For testing purpose only from remote node");
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
}
});
vm0.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
boolean gotException = false;
Cache cache = PRQHelp.getCache();
class MyQueryObserver extends QueryObserverAdapter {
@Override
public void startQuery(Query query) {
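// Simulate a failure on the local (query-issuing) node; the test expects this exception,
// not the remote one, to reach the caller.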
throw new RuntimeException("For testing purpose only from local node");
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
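// Run the query; the checks below verify that the exception surfaced to the caller is the one
// thrown on the local node.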
final DefaultQuery query = (DefaultQuery) cache.getQueryService().newQuery("Select * from /" + name);
try {
query.execute();
} catch (Exception ex) {
gotException = true;
if (ex.getMessage().contains("local node")) {
// ex.printStackTrace();
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
}
}
if (!gotException) {
fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test did not receive Exception as expected from local as well as remote node");
}
}
});
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
Use of org.apache.geode.cache.query.data.PortfolioData in project geode by apache.
In the class PRQueryRemoteNodeExceptionDUnitTest, the method testCacheCloseExceptionFromLocalAndRemote2.
@Test
public void testCacheCloseExceptionFromLocalAndRemote2() throws Exception {
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
setCacheInVMs(vm0, vm1);
List vmList = new LinkedList();
vmList.add(vm1);
vmList.add(vm0);
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, PortfolioData.class));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
// Generating portfolio object array to be populated across the PR's & Local
// Regions
final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName, portfolio, cnt, cntDest));
LogWriterUtils.getLogWriter().info("PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
// Insert the test hooks on the local and remote nodes.
// The hook on the remote node destroys the PR while the query is iterating; the hook on the
// local node waits until that destroy is visible locally. The query is then expected to fail
// with a QueryInvocationTargetException.
vm1.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
class MyQueryObserver extends IndexTrackingQueryObserver {
private int noOfAccess = 0;
@Override
public void afterIterationEvaluation(Object result) {
LogWriterUtils.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
if (noOfAccess > 1) {
PRQHelp.getCache().getRegion(name).destroyRegion();
}
++noOfAccess;
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
}
});
vm0.invoke(new CacheSerializableRunnable(name) {
@Override
public void run2() throws CacheException {
boolean gotException = false;
Cache cache = PRQHelp.getCache();
class MyQueryObserver extends QueryObserverAdapter {
private int noOfAccess = 0;
@Override
public void afterIterationEvaluation(Object result) {
// Object region = ((DefaultQuery)query).getRegionsInQuery(null).iterator().next();
// getLogWriter().info("Region type:"+region);
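// Poll briefly (10 ms at a time, up to 11 tries) until the region destroy initiated on the
// remote node is reflected here, so the in-flight query observes a destroyed region.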
int i = 0;
while (i <= 10) {
Region region = PRQHelp.getCache().getRegion(name);
if (region == null || region.isDestroyed()) {
break;
}
try {
Thread.sleep(10);
} catch (Exception ex) {
}
i++;
}
}
}
QueryObserverHolder.setInstance(new MyQueryObserver());
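// The region destroy initiated on the remote node is expected to surface here as a
// QueryInvocationTargetException, since the region (not the whole cache) was destroyed.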
final DefaultQuery query = (DefaultQuery) cache.getQueryService().newQuery("Select * from /" + name + " p where p.ID > 0");
try {
query.execute();
} catch (Exception ex) {
gotException = true;
if (ex instanceof QueryInvocationTargetException) {
LogWriterUtils.getLogWriter().info(ex.getMessage());
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from remote node successfully as region.destroy happened before cache.close().");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
}
}
if (!gotException) {
fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test did not receive Exception as expected from local as well as remote node");
}
}
});
LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}