Use of org.apache.geode.cache.query.internal.Undefined in project geode by apache.
The class DSFIDFactory, method readUndefined:
private static Undefined readUndefined(DataInput in) throws IOException, ClassNotFoundException {
  Undefined o = (Undefined) QueryService.UNDEFINED;
  InternalDataSerializer.invokeFromData(o, in);
  return o;
}
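Note that readUndefined does not deserialize into a fresh object; it reuses the QueryService.UNDEFINED singleton, so query results containing undefined values can be detected by comparing elements against that constant, much as the tests below do with instanceof Undefined. A rough sketch of such a check outside the test harness (the region name and field are made up, not from the Geode sources):

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;

public class UndefinedResultCheck {

  // Returns true if any projected value came back as UNDEFINED,
  // for example because the queried field is absent on some entries.
  static boolean containsUndefined(Cache cache) throws Exception {
    QueryService queryService = cache.getQueryService();
    Query query = queryService.newQuery("select p.maybeMissingField from /exampleRegion p");
    SelectResults<?> results = (SelectResults<?>) query.execute();
    for (Object value : results) {
      if (QueryService.UNDEFINED.equals(value)) { // UNDEFINED is the singleton Undefined instance
        return true;
      }
    }
    return false;
  }
}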
Use of org.apache.geode.cache.query.internal.Undefined in project geode by apache.
The class InitializeIndexEntryDestroyQueryDUnitTest, method testConcurrentRemoveIndexAndQueryOnPR:
@Test
public void testConcurrentRemoveIndexAndQueryOnPR() {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  setCacheInVMs(vm0);
  name = "PartionedPortfoliosPR";
  // Create Local Region
  vm0.invoke(new CacheSerializableRunnable("Create local region with asynchronous index maintenance") {
    @Override
    public void run2() throws CacheException {
      Cache cache = getCache();
      Region partitionRegion = null;
      try {
        AttributesFactory attr = new AttributesFactory();
        attr.setValueConstraint(PortfolioData.class);
        attr.setIndexMaintenanceSynchronous(false);
        attr.setPartitionAttributes(new PartitionAttributesFactory().create());
        RegionFactory regionFactory = cache.createRegionFactory(attr.create());
        partitionRegion = regionFactory.create(name);
      } catch (IllegalStateException ex) {
        LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
      }
      assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
      assertNotNull("Region ref null", partitionRegion);
      assertTrue("Region ref claims to be destroyed", !partitionRegion.isDestroyed());
    }
  });
  final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
  // Putting the data into the PR's created
  vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
  vm0.invoke(new CacheSerializableRunnable("Create Index") {
    @Override
    public void run2() throws CacheException {
      // Create Index first to go in hook.
      Cache cache = getCache();
      Index sindex = null;
      Index iindex = null;
      Index pkindex = null;
      try {
        sindex = cache.getQueryService().createIndex("statusIndex", "p.status", "/" + name + " p");
        iindex = cache.getQueryService().createIndex("idIndex", "p.ID", "/" + name + " p");
        pkindex = cache.getQueryService().createIndex("pkidIndex", "p.pk", "/" + name + " p");
      } catch (Exception e1) {
        e1.printStackTrace();
        fail("Index creation failed");
      }
      assertNotNull(sindex);
      assertNotNull(iindex);
      assertNotNull(pkindex);
    }
  });
  vm0.invoke(new CacheSerializableRunnable("Run query on region") {
    @Override
    public void run2() throws CacheException {
      // Do a put in region.
      Query query = getCache().getQueryService().newQuery("select * from /" + name + " p where p.status = 'active' and p.ID > 0 and p.pk != ' ' ");
      // Now run the query
      SelectResults results = null;
      for (int i = 0; i < 10; i++) {
        try {
          getCache().getLogger().fine("Querying the region with " + query);
          results = (SelectResults) query.execute();
        } catch (Exception e) {
          Assert.fail("Query: " + query + " execution failed with exception", e);
        }
        for (Object obj : results) {
          if (obj instanceof Undefined) {
            fail("Found an undefined element" + Arrays.toString(results.toArray()));
          }
        }
      }
    }
  });
  vm0.invoke(new CacheSerializableRunnable("Create Index") {
    @Override
    public void run2() throws CacheException {
      Region r = getCache().getRegion(name);
      // Create Index first to go in hook.
      getCache().getQueryService().removeIndexes(r);
    }
  });
}
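The test above creates three indexes, queries the region repeatedly, and finally drops all indexes on it through the QueryService. Stripped of the DUnit harness, the same index lifecycle looks roughly like the sketch below (the cache setup, region name, and index name are illustrative assumptions, not taken from the test):

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.Index;
import org.apache.geode.cache.query.QueryService;

public class IndexLifecycleSketch {

  public static void main(String[] args) throws Exception {
    Cache cache = new CacheFactory().set("mcast-port", "0").create();
    Region<Object, Object> region =
        cache.createRegionFactory(RegionShortcut.PARTITION).create("examplePortfolios");
    QueryService queryService = cache.getQueryService();
    // Create a functional index on the status field of entries in the region.
    Index statusIndex = queryService.createIndex("statusIndex", "p.status", "/examplePortfolios p");
    System.out.println("Created index: " + statusIndex.getName());
    // Drop every index defined on the region, as the test's final runnable does.
    queryService.removeIndexes(region);
    cache.close();
  }
}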
Use of org.apache.geode.cache.query.internal.Undefined in project geode by apache.
The class PdxQueryDUnitTest, method testPdxInstanceNoFieldNoMethod:
/**
 * Tests querying a field that is not present in the Pdx object; the implicit method is also
 * absent from the class.
 *
 * @throws CacheException
 */
@Test
public void testPdxInstanceNoFieldNoMethod() throws CacheException {
  final Host host = Host.getHost(0);
  final VM vm0 = host.getVM(0);
  final VM vm3 = host.getVM(3);
  final int numberOfEntries = 10;
  final String name = "/" + regionName;
  final String[] qs = { "select * from " + name + " where pdxStatus = 'active'", "select pdxStatus from " + name + " where id > 4" };
  // Start server1
  final int port1 = (Integer) vm0.invoke(new SerializableCallable("Create Server1") {
    @Override
    public Object call() throws Exception {
      Region r1 = getCache().createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
      CacheServer server = getCache().addCacheServer();
      int port = AvailablePortHelper.getRandomAvailablePortForDUnitSite();
      server.setPort(port);
      server.start();
      return port;
    }
  });
  // create client and load only version 1 objects with no pdxStatus field
  vm3.invoke(new SerializableCallable("Create client") {
    @Override
    public Object call() throws Exception {
      ClientCacheFactory cf = new ClientCacheFactory();
      cf.addPoolServer(NetworkUtils.getServerHostName(vm0.getHost()), port1);
      ClientCache cache = getClientCache(cf);
      Region region = cache.createClientRegionFactory(ClientRegionShortcut.PROXY).create(regionName);
      // Load version 1 objects
      for (int i = 0; i < numberOfEntries; i++) {
        PdxInstanceFactory pdxInstanceFactory = PdxInstanceFactoryImpl.newCreator("PdxVersionedNewPortfolio", false);
        pdxInstanceFactory.writeInt("id", i);
        pdxInstanceFactory.writeString("status", (i % 2 == 0 ? "active" : "inactive"));
        PdxInstance pdxInstance = pdxInstanceFactory.create();
        region.put("key-" + i, pdxInstance);
      }
      return null;
    }
  });
  // Version1 class loader
  vm3.invoke(new SerializableCallable("Create client") {
    @Override
    public Object call() throws Exception {
      // Load version 1 classloader
      QueryService remoteQueryService = null;
      // Execute query remotely
      try {
        remoteQueryService = getCache().getQueryService();
      } catch (Exception e) {
        Assert.fail("Failed to get QueryService.", e);
      }
      for (int i = 0; i < qs.length; i++) {
        try {
          SelectResults sr = (SelectResults) remoteQueryService.newQuery(qs[i]).execute();
          if (i == 1) {
            assertEquals(5, sr.size());
            for (Object o : sr) {
              if (!(o instanceof Undefined)) {
                fail("Result should be Undefined and not " + o.getClass());
              }
            }
          } else {
            assertEquals(0, sr.size());
          }
        } catch (Exception e) {
          Assert.fail("Failed executing " + qs[i], e);
        }
      }
      return null;
    }
  });
  Invoke.invokeInEveryVM(DistributedTestCase.class, "disconnectFromDS");
}
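The client above builds versioned Pdx objects through the internal PdxInstanceFactoryImpl.newCreator helper. Application code would normally use the public PdxInstanceFactory API for the same thing; a minimal sketch, assuming a hypothetical client pointed at localhost and a made-up type name (none of these values come from the test):

import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxInstanceFactory;

public class PdxInstancePutExample {

  public static void main(String[] args) {
    ClientCache cache = new ClientCacheFactory().addPoolServer("localhost", 40404).create();
    Region<String, PdxInstance> region =
        cache.<String, PdxInstance>createClientRegionFactory(ClientRegionShortcut.PROXY).create("exampleRegion");
    // Build a PdxInstance that has "id" and "status" fields but no "pdxStatus" field;
    // a query that projects pdxStatus over such entries yields UNDEFINED, as the test asserts.
    PdxInstanceFactory factory = cache.createPdxInstanceFactory("example.VersionedPortfolio");
    factory.writeInt("id", 1);
    factory.writeString("status", "active");
    PdxInstance instance = factory.create();
    region.put("key-1", instance);
    cache.close();
  }
}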
Use of org.apache.geode.cache.query.internal.Undefined in project geode by apache.
The class InitializeIndexEntryDestroyQueryDUnitTest, method testAsyncIndexInitDuringEntryDestroyAndQuery:
@Test
public void testAsyncIndexInitDuringEntryDestroyAndQuery() {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  setCacheInVMs(vm0);
  name = "PartionedPortfolios";
  // Create Local Region
  vm0.invoke(new CacheSerializableRunnable("Create local region with asynchronous index maintenance") {
    @Override
    public void run2() throws CacheException {
      Cache cache = getCache();
      Region localRegion = null;
      try {
        AttributesFactory attr = new AttributesFactory();
        attr.setValueConstraint(PortfolioData.class);
        attr.setScope(Scope.LOCAL);
        attr.setIndexMaintenanceSynchronous(false);
        RegionFactory regionFactory = cache.createRegionFactory(attr.create());
        localRegion = regionFactory.create(name);
      } catch (IllegalStateException ex) {
        LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
      }
      assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
      assertNotNull("Region ref null", localRegion);
      assertTrue("Region ref claims to be destroyed", !localRegion.isDestroyed());
    }
  });
  final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
  // Putting the data into the PR's created
  vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
  AsyncInvocation asyInvk0 = vm0.invokeAsync(new CacheSerializableRunnable("Create Index with Hook") {
    @Override
    public void run2() throws CacheException {
      for (int i = 0; i < cntDest; i++) {
        // Create Index first to go in hook.
        Cache cache = getCache();
        Index index = null;
        try {
          index = cache.getQueryService().createIndex("statusIndex", "p.status", "/" + name + " p");
        } catch (Exception e1) {
          e1.printStackTrace();
          fail("Index creation failed");
        }
        assertNotNull(index);
        Wait.pause(100);
        getCache().getQueryService().removeIndex(index);
        Wait.pause(100);
      }
    }
  });
  // Change the value in Region
  AsyncInvocation asyInvk1 = vm0.invokeAsync(new CacheSerializableRunnable("Change value in region") {
    @Override
    public void run2() throws CacheException {
      // Do a put in region.
      Region r = getCache().getRegion(name);
      for (int i = 0, j = 0; i < 1000; i++, j++) {
        PortfolioData p = (PortfolioData) r.get(j);
        getCache().getLogger().fine("Going to destroy the value" + p);
        r.destroy(j);
        Wait.pause(100);
        // Put the value back again.
        getCache().getLogger().fine("Putting the value back" + p);
        r.put(j, p);
        // Reset j
        if (j == cntDest - 1) {
          j = 0;
        }
      }
    }
  });
  vm0.invoke(new CacheSerializableRunnable("Run query on region") {
    @Override
    public void run2() throws CacheException {
      // Do a put in region.
      Region r = getCache().getRegion(name);
      Query query = getCache().getQueryService().newQuery("select * from /" + name + " p where p.status = 'active'");
      // Now run the query
      SelectResults results = null;
      for (int i = 0; i < 500; i++) {
        try {
          getCache().getLogger().fine("Querying the region");
          results = (SelectResults) query.execute();
        } catch (Exception e) {
          e.printStackTrace();
        }
        for (Object obj : results) {
          if (obj instanceof Undefined) {
            fail("Found an undefined element" + Arrays.toString(results.toArray()));
          }
        }
      }
    }
  });
  ThreadUtils.join(asyInvk0, 1000 * 1000);
  if (asyInvk0.exceptionOccurred()) {
    Assert.fail("asyInvk0 failed", asyInvk0.getException());
  }
  ThreadUtils.join(asyInvk1, 1000 * 1000);
  if (asyInvk1.exceptionOccurred()) {
    Assert.fail("asyInvk1 failed", asyInvk1.getException());
  }
}
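This test (and the PR variant that follows) configures the region for asynchronous index maintenance, which is what lets entry destroys race with index initialization. A minimal sketch of that configuration using the RegionFactory API (the region name and shortcut are illustrative assumptions, not taken from the tests):

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.QueryService;

public class AsyncIndexMaintenanceSketch {

  public static void main(String[] args) throws Exception {
    Cache cache = new CacheFactory().set("mcast-port", "0").create();
    // Index updates on this region happen asynchronously after each entry operation,
    // so a query may briefly run against an index that is still catching up.
    Region<Object, Object> region = cache.createRegionFactory(RegionShortcut.PARTITION)
        .setIndexMaintenanceSynchronous(false)
        .create("examplePortfolios");
    QueryService queryService = cache.getQueryService();
    queryService.createIndex("statusIndex", "p.status", "/examplePortfolios p");
    System.out.println("Created region " + region.getName() + " with asynchronous index maintenance");
    cache.close();
  }
}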
Use of org.apache.geode.cache.query.internal.Undefined in project geode by apache.
The class InitializeIndexEntryDestroyQueryDUnitTest, method testAsyncIndexInitDuringEntryDestroyAndQueryOnPR:
// GEODE-1036: uses PRQueryDUnitHelper, time sensitive, async actions, sleeps
@Category(FlakyTest.class)
@Test
public void testAsyncIndexInitDuringEntryDestroyAndQueryOnPR() {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  setCacheInVMs(vm0);
  name = "PartionedPortfoliosPR";
  // Create Local Region
  vm0.invoke(new CacheSerializableRunnable("Create local region with asynchronous index maintenance") {
    @Override
    public void run2() throws CacheException {
      Cache cache = getCache();
      Region partitionRegion = null;
      try {
        AttributesFactory attr = new AttributesFactory();
        attr.setValueConstraint(PortfolioData.class);
        attr.setIndexMaintenanceSynchronous(false);
        attr.setPartitionAttributes(new PartitionAttributesFactory().create());
        RegionFactory regionFactory = cache.createRegionFactory(attr.create());
        partitionRegion = regionFactory.create(name);
      } catch (IllegalStateException ex) {
        LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
      }
      assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
      assertNotNull("Region ref null", partitionRegion);
      assertTrue("Region ref claims to be destroyed", !partitionRegion.isDestroyed());
    }
  });
  final PortfolioData[] portfolio = createPortfolioData(cnt, cntDest);
  // Putting the data into the PR's created
  vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio, cnt, cntDest));
  AsyncInvocation asyInvk0 = vm0.invokeAsync(new CacheSerializableRunnable("Create Index with Hook") {
    @Override
    public void run2() throws CacheException {
      for (int i = 0; i < cntDest; i++) {
        // Create Index first to go in hook.
        Cache cache = getCache();
        Index index = null;
        try {
          index = cache.getQueryService().createIndex("statusIndex", "p.status", "/" + name + " p");
        } catch (Exception e1) {
          e1.printStackTrace();
          Assert.fail("Index creation failed", e1);
        }
        assertNotNull(index);
        getCache().getQueryService().removeIndex(index);
      }
    }
  });
  // Change the value in Region
  AsyncInvocation asyInvk1 = vm0.invokeAsync(new CacheSerializableRunnable("Change value in region") {
    @Override
    public void run2() throws CacheException {
      // Do a put in region.
      Region r = getCache().getRegion(name);
      for (int i = 0, j = 0; i < 1000; i++, j++) {
        PortfolioData p = (PortfolioData) r.get(j);
        getCache().getLogger().fine("Going to destroy the value" + p);
        r.destroy(j);
        Wait.pause(20);
        // Put the value back again.
        getCache().getLogger().fine("Putting the value back" + p);
        r.put(j, p);
        // Reset j
        if (j == cntDest - 1) {
          j = 0;
        }
      }
    }
  });
  vm0.invoke(new CacheSerializableRunnable("Run query on region") {
    @Override
    public void run2() throws CacheException {
      // Do a put in region.
      Query query = getCache().getQueryService().newQuery("select * from /" + name + " p where p.status = 'active'");
      // Now run the query
      SelectResults results = null;
      for (int i = 0; i < 500; i++) {
        try {
          getCache().getLogger().fine("Querying the region");
          results = (SelectResults) query.execute();
        } catch (Exception e) {
          // TODO: eats exceptions
          e.printStackTrace();
        }
        for (Object obj : results) {
          if (obj instanceof Undefined) {
            fail("Found an undefined element" + Arrays.toString(results.toArray()));
          }
        }
      }
    }
  });
  // TODO: this is way too long: 16.67 minutes!
  ThreadUtils.join(asyInvk0, 1000 * 1000);
  if (asyInvk0.exceptionOccurred()) {
    Assert.fail("asyInvk0 failed", asyInvk0.getException());
  }
  // TODO: this is way too long: 16.67 minutes!
  ThreadUtils.join(asyInvk1, 1000 * 1000);
  if (asyInvk1.exceptionOccurred()) {
    Assert.fail("asyInvk1 failed", asyInvk1.getException());
  }
}