Use of javax.cache.Cache in project hazelcast by hazelcast.
Source: class CacheThroughHazelcastInstanceTest, method whenThereIsCacheConfigWithURIandCreatedByCacheManager_thenReturnsSameCache.
/**
 * Creates a cache through a URI-scoped {@code CacheManager} and verifies that
 * retrieving it directly from the {@code HazelcastInstance} by its full
 * "URI/name" path yields the very same {@code Cache} instance (reference
 * identity), not a copy.
 *
 * @param getCache whether retrieval goes through getCache (true) or the alternate lookup path
 * @throws Exception if instance creation or the URI construction fails
 */
private void whenThereIsCacheConfigWithURIandCreatedByCacheManager_thenReturnsSameCache(boolean getCache) throws Exception {
    HazelcastInstance instance = createInstance();
    CachingProvider cachingProvider = createCachingProvider(instance);
    Properties properties = HazelcastCachingProvider.propertiesByInstanceItself(instance);
    CacheManager cacheManager = cachingProvider.getCacheManager(new URI("MY-URI"), null, properties);
    Cache cache1 = cacheManager.createCache(CACHE_NAME, createCacheConfig(CACHE_NAME));
    assertNotNull(cache1);
    Cache cache2 = retrieveCache(instance, "MY-URI/" + CACHE_NAME, getCache);
    assertNotNull(cache2);
    // assertSame is the idiomatic identity assertion: it states the intent directly
    // and produces a descriptive failure message, unlike assertTrue(a == b).
    assertSame(cache1, cache2);
}
Use of javax.cache.Cache in project hazelcast by hazelcast.
Source: class CacheThroughHazelcastInstanceTest, method whenThereIsCacheConfigWithSameNameButDifferentFullNameAndCreatedByCacheManager_thenFail.
/**
 * Creates a cache under a manager scoped to the URI "MY-URI", then attempts to
 * retrieve it from the {@code HazelcastInstance} by simple name only (no URI
 * prefix). The full names differ, so the retrieval is expected to fail inside
 * {@code retrieveCache}.
 *
 * @param getCache whether retrieval goes through getCache (true) or the alternate lookup path
 * @throws Exception if instance creation or the URI construction fails
 */
private void whenThereIsCacheConfigWithSameNameButDifferentFullNameAndCreatedByCacheManager_thenFail(boolean getCache) throws Exception {
    HazelcastInstance instance = createInstance();
    CachingProvider provider = createCachingProvider(instance);
    Properties props = HazelcastCachingProvider.propertiesByInstanceItself(instance);
    CacheManager uriScopedManager = provider.getCacheManager(new URI("MY-URI"), null, props);

    // Cache exists under the full name "MY-URI/<CACHE_NAME>" only.
    Cache createdCache = uriScopedManager.createCache(CACHE_NAME, createCacheConfig(CACHE_NAME));
    assertNotNull(createdCache);

    // Lookup by simple name must not resolve to the URI-scoped cache.
    retrieveCache(instance, getCache);
}
Use of javax.cache.Cache in project pratilipi by Pratilipi.
Source: class MemcacheGaeImpl, method flush.
@Override
public void flush() {
    // Obtain the memcache-backed JCache (empty config map = default cache)
    // and clear every entry in one call. createCache here returns a handle to
    // the shared GAE memcache, so clearing it flushes the whole namespace.
    try {
        CacheManager.getInstance().getCacheFactory().createCache(Collections.emptyMap()).clear();
    } catch (CacheException ex) {
        logger.log(Level.SEVERE, "Failed to create cache instance.", ex);
    }
}
Use of javax.cache.Cache in project ignite by apache.
Source: class CacheContinuousQueryExample, method main.
/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 * @throws Exception If example execution failed.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println();
        System.out.println(">>> Cache continuous query example started.");

        // Auto-close cache at the end of the example.
        try (IgniteCache<Integer, String> cache = ignite.getOrCreateCache(CACHE_NAME)) {
            int initialKeyCount = 20;

            // Seed entries that the initial scan query below will return.
            for (int key = 0; key < initialKeyCount; key++) {
                cache.put(key, Integer.toString(key));
            }

            // Create new continuous query.
            ContinuousQuery<Integer, String> continuousQry = new ContinuousQuery<>();

            // Initial query: selects pre-existing entries with key > 10.
            // NOTE: kept as an anonymous class (not a lambda) — it is serialized
            // and shipped to remote nodes.
            continuousQry.setInitialQuery(new ScanQuery<>(new IgniteBiPredicate<Integer, String>() {
                @Override
                public boolean apply(Integer key, String val) {
                    return key > 10;
                }
            }));

            // Callback invoked locally whenever update notifications are received.
            continuousQry.setLocalListener(new CacheEntryUpdatedListener<Integer, String>() {
                @Override
                public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends String>> events) {
                    for (CacheEntryEvent<? extends Integer, ? extends String> event : events) {
                        System.out.println("Updated entry [key=" + event.getKey() + ", val=" + event.getValue() + ']');
                    }
                }
            });

            // Remote filter, evaluated on all nodes; only entries that pass it
            // are sent back to the caller.
            continuousQry.setRemoteFilterFactory(new Factory<CacheEntryEventFilter<Integer, String>>() {
                @Override
                public CacheEntryEventFilter<Integer, String> create() {
                    return new CacheEntryEventFilter<Integer, String>() {
                        @Override
                        public boolean evaluate(CacheEntryEvent<? extends Integer, ? extends String> event) {
                            return event.getKey() > 10;
                        }
                    };
                }
            });

            // Execute query.
            try (QueryCursor<Cache.Entry<Integer, String>> cursor = cache.query(continuousQry)) {
                // Iterate through existing data.
                for (Cache.Entry<Integer, String> entry : cursor) {
                    System.out.println("Queried existing entry [key=" + entry.getKey() + ", val=" + entry.getValue() + ']');
                }

                // Add a few more keys and watch more query notifications.
                for (int key = initialKeyCount; key < initialKeyCount + 10; key++) {
                    cache.put(key, Integer.toString(key));
                }

                // Wait for a while while callback is notified about remaining puts.
                Thread.sleep(2000);
            }
        } finally {
            // Distributed cache could be removed from cluster only by #destroyCache() call.
            ignite.destroyCache(CACHE_NAME);
        }
    }
}
Use of javax.cache.Cache in project ignite by apache.
Source: class CacheAbstractJdbcStore, method writeAll.
/** {@inheritDoc} */
@Override
public void writeAll(final Collection<Cache.Entry<? extends K, ? extends V>> entries) throws CacheWriterException {
assert entries != null;
Connection conn = null;
try {
conn = connection();
String cacheName = session().cacheName();
// Tracks the key type of the statement currently being batched; a change of
// key type forces a flush + re-prepare because each type maps to its own table/query.
Object currKeyTypeId = null;
// Fast path: dialect supports MERGE/UPSERT, so entries can be written in JDBC batches.
if (dialect.hasMerge()) {
PreparedStatement mergeStmt = null;
try {
EntryMapping em = null;
// Entries are materialized to an array lazily — only needed if a batch
// fails and executeBatch has to report which entries were affected.
LazyValue<Object[]> lazyEntries = new LazyValue<Object[]>() {

    @Override
    public Object[] create() {
        return entries.toArray();
    }
};
// fromIdx: index of the first entry in the current batch; prepared: entries
// added to the batch since the last flush.
int fromIdx = 0, prepared = 0;
for (Cache.Entry<? extends K, ? extends V> entry : entries) {
K key = entry.getKey();
Object keyTypeId = typeIdForObject(key);
em = entryMapping(cacheName, keyTypeId);
// Key type changed (or first iteration): flush any pending batch for the
// previous type and prepare a merge statement for the new type.
if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) {
if (mergeStmt != null) {
if (log.isDebugEnabled())
log.debug("Write entries to db [cache=" + U.maskName(cacheName) + ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
U.closeQuiet(mergeStmt);
}
mergeStmt = conn.prepareStatement(em.mergeQry);
currKeyTypeId = keyTypeId;
fromIdx += prepared;
prepared = 0;
}
int idx = fillKeyParameters(mergeStmt, em, key);
fillValueParameters(mergeStmt, idx, em, entry.getValue());
mergeStmt.addBatch();
// Flush a full batch as soon as batchSize entries have been accumulated.
if (++prepared % batchSize == 0) {
if (log.isDebugEnabled())
log.debug("Write entries to db [cache=" + U.maskName(cacheName) + ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
fromIdx += prepared;
prepared = 0;
}
}
// Flush the final partial batch, if any entries remain unflushed.
if (mergeStmt != null && prepared % batchSize != 0) {
if (log.isDebugEnabled())
log.debug("Write entries to db [cache=" + U.maskName(cacheName) + ", keyType=" + em.keyType() + ", cnt=" + prepared + "]");
executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries);
}
} finally {
U.closeQuiet(mergeStmt);
}
} else {
// Slow path: no MERGE support — each entry is written via UPDATE, falling
// back to INSERT inside writeUpsert, one entry at a time.
if (log.isDebugEnabled())
log.debug("Write entries to db one by one using update and insert statements " + "[cache=" + U.maskName(cacheName) + ", cnt=" + entries.size() + "]");
PreparedStatement insStmt = null;
PreparedStatement updStmt = null;
try {
for (Cache.Entry<? extends K, ? extends V> entry : entries) {
K key = entry.getKey();
Object keyTypeId = typeIdForObject(key);
EntryMapping em = entryMapping(cacheName, keyTypeId);
// Re-prepare both statements whenever the key type (and thus the target table) changes.
if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) {
U.closeQuiet(insStmt);
insStmt = conn.prepareStatement(em.insQry);
U.closeQuiet(updStmt);
updStmt = conn.prepareStatement(em.updQry);
currKeyTypeId = keyTypeId;
}
writeUpsert(insStmt, updStmt, em, entry);
}
} finally {
U.closeQuiet(insStmt);
U.closeQuiet(updStmt);
}
}
} catch (SQLException e) {
throw new CacheWriterException("Failed to write entries in database", e);
} finally {
closeConnection(conn);
}
}
Aggregations