Search in sources:

Example 1 with PortfolioPdx

Use of org.apache.geode.cache.query.data.PortfolioPdx in project geode by apache.

From the class DiskStoreCommandsDUnitTest, the method testOfflineDiskStorePdxCommands:

@Test
public void testOfflineDiskStorePdxCommands() {
    final Properties props = new Properties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(START_LOCATOR, "localhost[" + AvailablePortHelper.getRandomAvailableTCPPort() + "]");
    final File diskStoreDir = new File(new File(".").getAbsolutePath(), "DiskStoreCommandDUnitDiskStores");
    diskStoreDir.mkdir();
    this.filesToBeDeleted.add(diskStoreDir.getAbsolutePath());
    final String diskStoreName1 = "DiskStore1";
    final String region1 = "Region1";
    final VM vm1 = Host.getHost(0).getVM(1);
    vm1.invoke(new SerializableRunnable() {

        public void run() {
            final Cache cache = new CacheFactory(props).setPdxPersistent(true).setPdxDiskStore(diskStoreName1).create();
            DiskStoreFactory diskStoreFactory = cache.createDiskStoreFactory();
            diskStoreFactory.setDiskDirs(new File[] { diskStoreDir });
            final DiskStore diskStore1 = diskStoreFactory.create(diskStoreName1);
            assertNotNull(diskStore1);
            RegionFactory regionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
            regionFactory.setDiskStoreName(diskStoreName1);
            regionFactory.setDiskSynchronous(true);
            Region r1 = regionFactory.create(region1);
            r1.put("key-1", new PortfolioPdx(1));
            cache.close();
            assertTrue(new File(diskStoreDir, "BACKUP" + diskStoreName1 + ".if").exists());
        }
    });
    CommandResult cmdResult = executeCommand("describe offline-disk-store --name=" + diskStoreName1 + " --disk-dirs=" + diskStoreDir.getAbsolutePath() + " --pdx=true");
    String stringResult = commandResultToString(cmdResult);
    assertTrue(stringContainsLine(stringResult, ".*PDX Types.*"));
    assertTrue(stringContainsLine(stringResult, ".*org\\.apache\\.geode\\.cache\\.query\\.data\\.PortfolioPdx.*"));
    assertTrue(stringContainsLine(stringResult, ".*org\\.apache\\.geode\\.cache\\.query\\.data\\.PositionPdx.*"));
    assertTrue(stringContainsLine(stringResult, ".*PDX Enums.*"));
    assertTrue(stringContainsLine(stringResult, ".*org\\.apache\\.geode\\.cache\\.query\\.data\\.PortfolioPdx\\$Day.*"));
}
Also used: SerializableRunnable (org.apache.geode.test.dunit.SerializableRunnable), PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx), Properties (java.util.Properties), DiskStoreFactory (org.apache.geode.cache.DiskStoreFactory), CommandResult (org.apache.geode.management.internal.cli.result.CommandResult), DiskStore (org.apache.geode.cache.DiskStore), RegionFactory (org.apache.geode.cache.RegionFactory), VM (org.apache.geode.test.dunit.VM), PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion), Region (org.apache.geode.cache.Region), CacheFactory (org.apache.geode.cache.CacheFactory), File (java.io.File), Cache (org.apache.geode.cache.Cache), InternalCache (org.apache.geode.internal.cache.InternalCache), DistributedTest (org.apache.geode.test.junit.categories.DistributedTest), FlakyTest (org.apache.geode.test.junit.categories.FlakyTest), Test (org.junit.Test)
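
For readers who want the core of this pattern outside the DUnit harness, here is a minimal sketch of persisting PDX type metadata alongside a region so it can be inspected offline. The standalone member, the disk directory name, and the assumption that the PortfolioPdx test-data class is on the classpath are illustrative; the gfsh command in the trailing comment mirrors the one executed by the test above.

import java.io.File;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DiskStore;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.data.PortfolioPdx;

public class OfflinePdxDiskStoreSketch {

    public static void main(String[] args) {
        File diskDir = new File("pdx-disk-store-dir"); // illustrative directory name
        diskDir.mkdirs();

        // PDX type metadata is persisted to the same disk store as the region data,
        // so it can be inspected later while the member is offline.
        Cache cache = new CacheFactory()
            .set("mcast-port", "0")
            .setPdxPersistent(true)
            .setPdxDiskStore("DiskStore1")
            .create();

        DiskStore diskStore = cache.createDiskStoreFactory()
            .setDiskDirs(new File[] { diskDir })
            .create("DiskStore1");

        Region<String, PortfolioPdx> region = cache
            .<String, PortfolioPdx>createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT)
            .setDiskStoreName(diskStore.getName())
            .setDiskSynchronous(true)
            .create("Region1");

        // Writing a PortfolioPdx registers its PDX type (and the nested PositionPdx type
        // and the PortfolioPdx$Day enum) in the persisted PDX registry.
        region.put("key-1", new PortfolioPdx(1));
        cache.close();

        // Once the member is down, the persisted PDX types can be listed with gfsh:
        //   describe offline-disk-store --name=DiskStore1 --disk-dirs=pdx-disk-store-dir --pdx=true
    }
}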

Example 2 with PortfolioPdx

Use of org.apache.geode.cache.query.data.PortfolioPdx in project geode by apache.

From the class PdxQueryDUnitTest, the method testPutAllWithIndexes:

/**
   * This test creates three cache servers with a PR and one client that puts some PDX values
   * into the PR and runs a query. This was failing randomly in a POC.
   */
@Test
public void testPutAllWithIndexes() {
    final String name = "testRegion";
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    final Properties config = new Properties();
    config.setProperty("locators", "localhost[" + DistributedTestUtils.getDUnitLocatorPort() + "]");
    // Start server
    vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            Cache cache = new CacheFactory(config).create();
            AttributesFactory factory = new AttributesFactory();
            PartitionAttributesFactory prfactory = new PartitionAttributesFactory();
            prfactory.setRedundantCopies(0);
            factory.setPartitionAttributes(prfactory.create());
            cache.createRegionFactory(factory.create()).create(name);
            try {
                startCacheServer(0, false);
            } catch (Exception ex) {
                Assert.fail("While starting CacheServer", ex);
            }
            // Create Index on empty region
            try {
                cache.getQueryService().createIndex("myFuncIndex", "intId", "/" + name);
            } catch (Exception e) {
                Assert.fail("index creation failed", e);
            }
        }
    });
    // Start server
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            Cache cache = new CacheFactory(config).create();
            AttributesFactory factory = new AttributesFactory();
            PartitionAttributesFactory prfactory = new PartitionAttributesFactory();
            prfactory.setRedundantCopies(0);
            factory.setPartitionAttributes(prfactory.create());
            cache.createRegionFactory(factory.create()).create(name);
            try {
                startCacheServer(0, false);
            } catch (Exception ex) {
                Assert.fail("While starting CacheServer", ex);
            }
        }
    });
    // Start server
    vm2.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            Cache cache = new CacheFactory(config).create();
            AttributesFactory factory = new AttributesFactory();
            PartitionAttributesFactory prfactory = new PartitionAttributesFactory();
            prfactory.setRedundantCopies(0);
            factory.setPartitionAttributes(prfactory.create());
            cache.createRegionFactory(factory.create()).create(name);
            try {
                startCacheServer(0, false);
            } catch (Exception ex) {
                Assert.fail("While starting CacheServer", ex);
            }
        }
    });
    // Create client region
    final int port = vm0.invoke(() -> PdxQueryDUnitTest.getCacheServerPort());
    final String host0 = NetworkUtils.getServerHostName(vm2.getHost());
    vm3.invoke(new CacheSerializableRunnable("Create region") {

        public void run2() throws CacheException {
            Properties config = new Properties();
            config.setProperty("mcast-port", "0");
            ClientCache cache = new ClientCacheFactory(config).addPoolServer(host0, port).setPoolPRSingleHopEnabled(true).setPoolSubscriptionEnabled(true).create();
            AttributesFactory factory = new AttributesFactory();
            cache.createClientRegionFactory(ClientRegionShortcut.PROXY).create(name);
        }
    });
    vm3.invoke(new CacheSerializableRunnable("putAll() test") {

        @Override
        public void run2() throws CacheException {
            try {
                ClientCache cache = new ClientCacheFactory().create();
                Region region = cache.getRegion(name);
                QueryService queryService = cache.getQueryService();
                String k;
                for (int x = 0; x < 285; x++) {
                    k = Integer.valueOf(x).toString();
                    PortfolioPdx v = new PortfolioPdx(x, x);
                    region.put(k, v);
                }
                Query q = queryService.newQuery("SELECT DISTINCT * from /" + name + " WHERE ID = 2");
                SelectResults qResult = (SelectResults) q.execute();
                for (Object o : qResult.asList()) {
                    System.out.println("o = " + o);
                }
            } catch (Exception e) {
                Assert.fail("Querying failed: ", e);
            }
        }
    });
    Invoke.invokeInEveryVM(DistributedTestCase.class, "disconnectFromDS");
// }
}
Also used: Query (org.apache.geode.cache.query.Query), CacheException (org.apache.geode.cache.CacheException), Host (org.apache.geode.test.dunit.Host), PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx), ClientCache (org.apache.geode.cache.client.ClientCache), Properties (java.util.Properties), ClientCacheFactory (org.apache.geode.cache.client.ClientCacheFactory), PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory), AttributesFactory (org.apache.geode.cache.AttributesFactory), SelectResults (org.apache.geode.cache.query.SelectResults), CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable), QueryService (org.apache.geode.cache.query.QueryService), VM (org.apache.geode.test.dunit.VM), Region (org.apache.geode.cache.Region), CacheFactory (org.apache.geode.cache.CacheFactory), Cache (org.apache.geode.cache.Cache), DistributedTest (org.apache.geode.test.junit.categories.DistributedTest), Test (org.junit.Test)
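
The test above drives the servers through per-entry puts; the following is a minimal client-side sketch of the same idea using an actual putAll batch, which should exercise the same server-side index maintenance. The server endpoint (localhost:40404), region name, and entry count are illustrative assumptions, not values taken from the test harness.

import java.util.HashMap;
import java.util.Map;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.PortfolioPdx;

public class ClientPutAllQuerySketch {

    public static void main(String[] args) throws Exception {
        // Assumed server endpoint; in the DUnit test the port is discovered at runtime.
        ClientCache cache = new ClientCacheFactory()
            .addPoolServer("localhost", 40404)
            .setPoolPRSingleHopEnabled(true)
            .create();

        Region<String, PortfolioPdx> region = cache
            .<String, PortfolioPdx>createClientRegionFactory(ClientRegionShortcut.PROXY)
            .create("testRegion");

        // Bulk-load PDX values in one putAll batch instead of per-entry puts.
        Map<String, PortfolioPdx> batch = new HashMap<>();
        for (int i = 0; i < 285; i++) {
            batch.put(Integer.toString(i), new PortfolioPdx(i, i));
        }
        region.putAll(batch);

        // Query through the client pool; the servers evaluate it against the PR.
        Query query = cache.getQueryService()
            .newQuery("SELECT DISTINCT * FROM /testRegion WHERE ID = 2");
        SelectResults<?> results = (SelectResults<?>) query.execute();
        System.out.println("matches: " + results.size());

        cache.close();
    }
}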

Example 3 with PortfolioPdx

Use of org.apache.geode.cache.query.data.PortfolioPdx in project geode by apache.

From the class PdxQueryDUnitTest, the method testPdxIdentity:

/**
   * Tests identity of Pdx.
   */
@Test
public void testPdxIdentity() throws CacheException {
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    final int numberOfEntries = 10;
    final String queryStr = "SELECT DISTINCT * FROM " + this.regName + " pf where pf.ID > 2 and pf.ID < 10";
    // Start server1
    vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            configAndStartBridgeServer();
            Region region = getRootRegion().getSubregion(regionName);
        }
    });
    // Start server2
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            configAndStartBridgeServer();
            Region region = getRootRegion().getSubregion(regionName);
        }
    });
    // Client pool.
    final int port0 = vm0.invoke(() -> PdxQueryDUnitTest.getCacheServerPort());
    final int port1 = vm1.invoke(() -> PdxQueryDUnitTest.getCacheServerPort());
    final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
    // Create client pool.
    final String poolName = "testClientServerQueryPool";
    createPool(vm2, poolName, new String[] { host0 }, new int[] { port0 }, true);
    createPool(vm3, poolName, new String[] { host0 }, new int[] { port1 }, true);
    final int dupIndex = 2;
    // Create client region
    vm3.invoke(new CacheSerializableRunnable("Create region") {

        public void run2() throws CacheException {
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            ClientServerTestCase.configureConnectionPool(factory, host0, port1, -1, true, -1, -1, null);
            Region region = createRegion(regionName, rootRegionName, factory.create());
            int j = 0;
            for (int i = 0; i < numberOfEntries * 2; i++) {
                // insert duplicate values.
                if (i % dupIndex == 0) {
                    j++;
                }
                region.put("key-" + i, new PortfolioPdx(j));
            }
        }
    });
    // Execute client queries
    SerializableRunnable executeQueries = new CacheSerializableRunnable("Execute queries") {

        public void run2() throws CacheException {
            QueryService remoteQueryService = null;
            QueryService localQueryService = null;
            SelectResults[][] rs = new SelectResults[1][2];
            try {
                remoteQueryService = (PoolManager.find(poolName)).getQueryService();
                localQueryService = getCache().getQueryService();
            } catch (Exception e) {
                Assert.fail("Failed to get QueryService.", e);
            }
            int expectedResultSize = 7;
            try {
                logger.info("### Executing Query on server:" + queryStr);
                Query query = remoteQueryService.newQuery(queryStr);
                rs[0][0] = (SelectResults) query.execute();
                assertEquals(expectedResultSize, rs[0][0].size());
                logger.info("### Executing Query locally:" + queryStr);
                query = localQueryService.newQuery(queryStr);
                rs[0][1] = (SelectResults) query.execute();
                assertEquals(expectedResultSize, rs[0][1].size());
                logger.info("### Remote Query rs size: " + (rs[0][0]).size() + "Local Query rs size: " + (rs[0][1]).size());
                // Compare local and remote query results.
                if (!CacheUtils.compareResultsOfWithAndWithoutIndex(rs)) {
                    fail("Local and Remote Query Results are not matching for query :" + queryStr);
                }
            } catch (Exception e) {
                Assert.fail("Failed executing " + queryStr, e);
            }
        }
    };
    // vm2.invoke(executeQueries);
    vm3.invoke(executeQueries);
    // Check for TestObject instances on Server2.
    // It should be 0
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            assertEquals(0, PortfolioPdx.numInstance);
        }
    });
    this.closeClient(vm2);
    this.closeClient(vm3);
    this.closeClient(vm1);
    this.closeClient(vm0);
}
Also used: Query (org.apache.geode.cache.query.Query), CacheException (org.apache.geode.cache.CacheException), SerializableRunnable (org.apache.geode.test.dunit.SerializableRunnable), CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable), Host (org.apache.geode.test.dunit.Host), PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx), AttributesFactory (org.apache.geode.cache.AttributesFactory), PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory), SelectResults (org.apache.geode.cache.query.SelectResults), QueryService (org.apache.geode.cache.query.QueryService), VM (org.apache.geode.test.dunit.VM), Region (org.apache.geode.cache.Region), DistributedTest (org.apache.geode.test.junit.categories.DistributedTest), Test (org.junit.Test)
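
As a rough single-member sketch of the identity behavior this test checks, the code below inserts paired duplicate IDs and runs the same DISTINCT query against a local region. It omits the client/server pool plumbing, assumes PortfolioPdx is on the classpath, and only prints the result size; the test's expectedResultSize of 7 (IDs 3 through 9 after duplicates collapse) is the value the equivalent client/server query is asserted against.

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.PortfolioPdx;

public class PdxIdentitySketch {

    public static void main(String[] args) throws Exception {
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        Region<String, PortfolioPdx> region = cache
            .<String, PortfolioPdx>createRegionFactory(RegionShortcut.REPLICATE)
            .create("testRegion");

        // Insert 20 entries whose IDs repeat in pairs (1, 1, 2, 2, ...), mirroring the
        // dupIndex logic in the test above.
        int j = 0;
        for (int i = 0; i < 20; i++) {
            if (i % 2 == 0) {
                j++;
            }
            region.put("key-" + i, new PortfolioPdx(j));
        }

        QueryService qs = cache.getQueryService();
        SelectResults<?> results = (SelectResults<?>) qs
            .newQuery("SELECT DISTINCT * FROM /testRegion pf WHERE pf.ID > 2 AND pf.ID < 10")
            .execute();

        // In the DUnit test the matching IDs 3..9 yield 7 results after DISTINCT;
        // here we simply print whatever the local engine returns.
        System.out.println("distinct matches: " + results.size());
        cache.close();
    }
}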

Example 4 with PortfolioPdx

Use of org.apache.geode.cache.query.data.PortfolioPdx in project geode by apache.

From the class PdxQueryDUnitTest, the method testNestedAndCollectionPdx:

/**
   * Tests client-server queries with nested Pdx objects and collections of Pdx.
   */
@Test
public void testNestedAndCollectionPdx() throws CacheException {
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    final int numberOfEntries = 50;
    final String[] queries = new String[] {
        "SELECT * FROM " + this.regName + " pf where pf.position1.secId > '2'",
        "SELECT * FROM " + this.regName + " p where p.position3[1].portfolioId = 2",
        "SELECT * FROM " + this.regName + " p, p.positions.values AS pos WHERE pos.secId != '1'",
        "SELECT key, positions FROM " + this.regName + ".entrySet, value.positions.values " + "positions WHERE positions.mktValue >= 25.00",
        "SELECT * FROM " + this.regName + " portfolio1, " + this.regName2 + " portfolio2 WHERE " + "portfolio1.status = portfolio2.status",
        "SELECT portfolio1.ID, portfolio2.status FROM " + this.regName + " portfolio1, " + this.regName + " portfolio2  WHERE portfolio1.status = portfolio2.status",
        "SELECT * FROM " + this.regName + " portfolio1, portfolio1.positions.values positions1, " + this.regName + " portfolio2,  portfolio2.positions.values positions2 WHERE " + "positions1.secId = positions2.secId ",
        "SELECT * FROM " + this.regName + " portfolio, portfolio.positions.values positions WHERE " + "portfolio.Pk IN SET ('1', '2') AND positions.secId = '1'",
        "SELECT DISTINCT * FROM " + this.regName + "  pf1, pf1.collectionHolderMap.values coll1," + " pf1.positions.values posit1, " + this.regName2 + "  pf2, pf2.collectionHolderMap.values " + " coll2, pf2.positions.values posit2 WHERE posit1.secId='IBM' AND posit2.secId='IBM'" };
    // Start server1
    vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            configAndStartBridgeServer();
            Region region1 = getRootRegion().getSubregion(regionName);
            Region region2 = getRootRegion().getSubregion(regionName2);
            for (int i = 0; i < numberOfEntries; i++) {
                region1.put("key-" + i, new PortfolioPdx(i, i));
                region2.put("key-" + i, new PortfolioPdx(i, i));
            }
        }
    });
    // Start server2
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            configAndStartBridgeServer();
            Region region = getRootRegion().getSubregion(regionName);
            Region region2 = getRootRegion().getSubregion(regionName2);
        }
    });
    // Client pool.
    final int port0 = vm0.invoke(() -> PdxQueryDUnitTest.getCacheServerPort());
    final int port1 = vm1.invoke(() -> PdxQueryDUnitTest.getCacheServerPort());
    final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
    // Create client pool.
    final String poolName = "testClientServerQueryPool";
    createPool(vm2, poolName, new String[] { host0 }, new int[] { port0 }, true);
    createPool(vm3, poolName, new String[] { host0 }, new int[] { port1 }, true);
    // Create client region
    vm3.invoke(new CacheSerializableRunnable("Create region") {

        public void run2() throws CacheException {
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            ClientServerTestCase.configureConnectionPool(factory, host0, port1, -1, true, -1, -1, null);
            Region region1 = createRegion(regionName, rootRegionName, factory.create());
            Region region2 = createRegion(regionName2, rootRegionName, factory.create());
            for (int i = 0; i < numberOfEntries; i++) {
                region1.put("key-" + i, new PortfolioPdx(i, i));
                region2.put("key-" + i, new PortfolioPdx(i, i));
            }
        }
    });
    // Execute client queries
    SerializableRunnable executeQueries = new CacheSerializableRunnable("Execute queries") {

        public void run2() throws CacheException {
            QueryService remoteQueryService = null;
            QueryService localQueryService = null;
            SelectResults[][] rs = new SelectResults[1][2];
            try {
                remoteQueryService = (PoolManager.find(poolName)).getQueryService();
                localQueryService = getCache().getQueryService();
            } catch (Exception e) {
                Assert.fail("Failed to get QueryService.", e);
            }
            for (int i = 0; i < queries.length; i++) {
                try {
                    logger.info("### Executing Query on server:" + queries[i]);
                    Query query = remoteQueryService.newQuery(queries[i]);
                    rs[0][0] = (SelectResults) query.execute();
                    logger.info("### Executing Query locally:" + queries[i]);
                    query = localQueryService.newQuery(queries[i]);
                    rs[0][1] = (SelectResults) query.execute();
                    // Compare local and remote query results.
                    if (!CacheUtils.compareResultsOfWithAndWithoutIndex(rs)) {
                        fail("Local and Remote Query Results are not matching for query :" + queries[i]);
                    }
                } catch (Exception e) {
                    Assert.fail("Failed executing " + queries[i], e);
                }
            }
        }
    };
    vm3.invoke(executeQueries);
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            assertEquals(0, PortfolioPdx.numInstance);
            assertEquals(0, PositionPdx.numInstance);
        }
    });
    // Create index
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            Region region = getRootRegion().getSubregion(regionName);
            QueryService qs = getCache().getQueryService();
            try {
                qs.createIndex("pkIndex", IndexType.FUNCTIONAL, "portfolio.Pk", regName + " portfolio");
                qs.createIndex("secIdIndex", IndexType.FUNCTIONAL, "pos.secId", regName + " p, p.positions.values AS pos");
                qs.createIndex("tickerIndex", IndexType.FUNCTIONAL, "pf.position1.secId", regName + " pf");
                qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "pos11.secId", regName + " pf1, pf1.collectionHolderMap.values coll1, pf1.positions.values pos11");
                qs.createIndex("secIdIndexPf2", IndexType.FUNCTIONAL, "pos22.secId", regName2 + " pf2, pf2.collectionHolderMap.values coll2, pf2.positions.values pos22");
            } catch (Exception ex) {
                fail("Unable to create index. " + ex.getMessage());
            }
        }
    });
    vm3.invoke(executeQueries);
    // The index is created on the portfolio.Pk field, which does not exist in the
    // PortfolioPdx object, but there is a method getPk(), so for #44436 the objects
    // are deserialized to get the value in vm1.
    vm1.invoke(new CacheSerializableRunnable("Create Bridge Server") {

        public void run2() throws CacheException {
            assertEquals(numberOfEntries, PortfolioPdx.numInstance);
            // 50 PortfolioPdx objects
            assertEquals(325, PositionPdx.numInstance);
            // (50*3)+50+50+50+25 = 325 PositionPdx objects are created when deserialized
        }
    });
    this.closeClient(vm2);
    this.closeClient(vm3);
    this.closeClient(vm1);
    this.closeClient(vm0);
}
Also used: Query (org.apache.geode.cache.query.Query), CacheException (org.apache.geode.cache.CacheException), SerializableRunnable (org.apache.geode.test.dunit.SerializableRunnable), CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable), Host (org.apache.geode.test.dunit.Host), PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx), AttributesFactory (org.apache.geode.cache.AttributesFactory), PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory), SelectResults (org.apache.geode.cache.query.SelectResults), QueryService (org.apache.geode.cache.query.QueryService), VM (org.apache.geode.test.dunit.VM), Region (org.apache.geode.cache.Region), DistributedTest (org.apache.geode.test.junit.categories.DistributedTest), Test (org.junit.Test)
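
A compact sketch of the index-creation step follows, using the non-deprecated three-argument QueryService.createIndex rather than the IndexType.FUNCTIONAL overload used in the test. The single-member setup, region name, and entry count are illustrative, and PortfolioPdx is assumed to be on the classpath.

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.PortfolioPdx;

public class NestedPdxIndexSketch {

    public static void main(String[] args) throws Exception {
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        Region<String, PortfolioPdx> region = cache
            .<String, PortfolioPdx>createRegionFactory(RegionShortcut.REPLICATE)
            .create("testRegion");

        for (int i = 0; i < 50; i++) {
            region.put("key-" + i, new PortfolioPdx(i, i));
        }

        QueryService qs = cache.getQueryService();

        // Functional indexes on a top-level field and on a nested collection, analogous
        // to the tickerIndex and secIdIndex created in the test.
        qs.createIndex("tickerIndex", "pf.position1.secId", "/testRegion pf");
        qs.createIndex("secIdIndex", "pos.secId", "/testRegion p, p.positions.values AS pos");

        // A query over the nested positions collection can now use secIdIndex.
        SelectResults<?> results = (SelectResults<?>) qs
            .newQuery("SELECT * FROM /testRegion p, p.positions.values AS pos WHERE pos.secId != '1'")
            .execute();
        System.out.println("matches: " + results.size());
        cache.close();
    }
}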

Example 5 with PortfolioPdx

Use of org.apache.geode.cache.query.data.PortfolioPdx in project geode by apache.

From the class PdxLocalQueryDUnitTest, the method testLocalPdxQueriesVerifyNoDeserialization:

@Test
public void testLocalPdxQueriesVerifyNoDeserialization() throws Exception {
    final Host host = Host.getHost(0);
    final VM server1 = host.getVM(0);
    final VM server2 = host.getVM(1);
    final int numberOfEntries = 10;
    final String name = "/" + regionName;
    final String[] queries = {
        "select * from " + name + " where status = 'inactive'",
        "select p from " + name + " p where p.status = 'inactive'",
        "select * from " + name + " p, p.positions.values v where v.secId = 'IBM'",
        "select p.status from " + name + " p where p.status = 'inactive' or p.ID > 0",
        "select * from " + name + " p where p.status = 'inactive' and p.ID >= 0",
        "select p.status from " + name + " p where p.status in set ('inactive', 'active')",
        "select * from " + name + " p where p.ID > 0 and p.ID < 10" };
    // Start server1
    server1.invoke(new SerializableCallable("Create Server1") {

        @Override
        public Object call() throws Exception {
            Region r1 = getCache().createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
            for (int i = 0; i < numberOfEntries; i++) {
                PortfolioPdx p = new PortfolioPdx(i);
                r1.put("key-" + i, p);
            }
            return null;
        }
    });
    // Start server2
    server2.invoke(new SerializableCallable("Create Server2") {

        @Override
        public Object call() throws Exception {
            Region r1 = getCache().createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
            QueryService qs = null;
            SelectResults sr = null;
            // Execute query locally
            try {
                qs = getCache().getQueryService();
            } catch (Exception e) {
                Assert.fail("Failed to get QueryService.", e);
            }
            for (int i = 0; i < queries.length; i++) {
                try {
                    sr = (SelectResults) qs.newQuery(queries[i]).execute();
                    assertTrue("Size of resultset should be greater than 0 for query: " + queries[i], sr.size() > 0);
                } catch (Exception e) {
                    Assert.fail("Failed executing query " + queries[i], e);
                }
            }
            assertEquals("Unexpected number of objects deserialized ", 0, PortfolioPdx.numInstance);
            return null;
        }
    });
    this.closeClient(server1);
    this.closeClient(server2);
}
Also used: Host (org.apache.geode.test.dunit.Host), PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx), PdxString (org.apache.geode.pdx.internal.PdxString), CacheException (org.apache.geode.cache.CacheException), SelectResults (org.apache.geode.cache.query.SelectResults), QueryService (org.apache.geode.cache.query.QueryService), VM (org.apache.geode.test.dunit.VM), SerializableCallable (org.apache.geode.test.dunit.SerializableCallable), Region (org.apache.geode.cache.Region), Test (org.junit.Test), DistributedTest (org.apache.geode.test.junit.categories.DistributedTest)
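
The zero-deserialization assertion in this test relies on server2 receiving the values already serialized over the wire. As a hedged, single-member illustration of the related read-serialized behavior, the sketch below enables setPdxReadSerialized(true) on a partitioned region and reads fields through PdxInstance; whether rows come back as PdxInstance can depend on how the values are stored, so the code checks before casting. The region name and entry count are illustrative.

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.PortfolioPdx;
import org.apache.geode.pdx.PdxInstance;

public class PdxReadSerializedQuerySketch {

    public static void main(String[] args) throws Exception {
        // With read-serialized enabled, the cache is expected to hand out PdxInstance
        // wrappers instead of deserializing values back into PortfolioPdx.
        Cache cache = new CacheFactory()
            .set("mcast-port", "0")
            .setPdxReadSerialized(true)
            .create();

        Region<String, Object> region = cache
            .<String, Object>createRegionFactory(RegionShortcut.PARTITION)
            .create("testRegion");

        for (int i = 0; i < 10; i++) {
            region.put("key-" + i, new PortfolioPdx(i));
        }

        SelectResults<?> results = (SelectResults<?>) cache.getQueryService()
            .newQuery("select * from /testRegion p where p.status = 'inactive'")
            .execute();

        for (Object row : results.asList()) {
            if (row instanceof PdxInstance) {
                // Fields such as status are read from the serialized form without
                // constructing a PortfolioPdx.
                PdxInstance pdx = (PdxInstance) row;
                System.out.println("status = " + pdx.getField("status"));
            } else {
                System.out.println("row = " + row);
            }
        }
        cache.close();
    }
}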

Aggregations

PortfolioPdx (org.apache.geode.cache.query.data.PortfolioPdx): 51 uses
Region (org.apache.geode.cache.Region): 50 uses
Test (org.junit.Test): 50 uses
QueryService (org.apache.geode.cache.query.QueryService): 47 uses
SelectResults (org.apache.geode.cache.query.SelectResults): 46 uses
Query (org.apache.geode.cache.query.Query): 34 uses
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 28 uses
VM (org.apache.geode.test.dunit.VM): 27 uses
DefaultQuery (org.apache.geode.cache.query.internal.DefaultQuery): 26 uses
Host (org.apache.geode.test.dunit.Host): 26 uses
CacheException (org.apache.geode.cache.CacheException): 19 uses
Struct (org.apache.geode.cache.query.Struct): 17 uses
SerializableCallable (org.apache.geode.test.dunit.SerializableCallable): 14 uses
AttributesFactory (org.apache.geode.cache.AttributesFactory): 13 uses
ClientCache (org.apache.geode.cache.client.ClientCache): 13 uses
ClientCacheFactory (org.apache.geode.cache.client.ClientCacheFactory): 13 uses
CompiledSelect (org.apache.geode.cache.query.internal.CompiledSelect): 13 uses
CacheSerializableRunnable (org.apache.geode.cache30.CacheSerializableRunnable): 13 uses
Iterator (java.util.Iterator): 12 uses
PartitionAttributesFactory (org.apache.geode.cache.PartitionAttributesFactory): 12 uses