Use of com.google.common.cache.Cache in project uPortal by Jasig: class SearchPortletController, method getPortalSearchResults.
/**
 * Looks up the {@link PortalSearchResults} for the given query id in the search-results
 * cache stored in the portlet session.
 *
 * @param request the current portlet request, used to obtain the session
 * @param queryId the id of the search query whose results are wanted
 * @return the cached results, or {@code null} if the session holds no results cache or
 *         the cache has no entry for the query id
 */
private PortalSearchResults getPortalSearchResults(PortletRequest request, String queryId) {
    final PortletSession session = request.getPortletSession();
    // The cache is stored as an untyped session attribute; the cast is safe because this
    // class is the only writer of SEARCH_RESULTS_CACHE_NAME.
    @SuppressWarnings("unchecked")
    final Cache<String, PortalSearchResults> resultsCache =
            (Cache<String, PortalSearchResults>) session.getAttribute(SEARCH_RESULTS_CACHE_NAME);
    return resultsCache == null ? null : resultsCache.getIfPresent(queryId);
}
Use of com.google.common.cache.Cache in project qpid-broker-j by Apache: class CacheFactoryTest, method getCache.
@Test
public void getCache() {
    final String cacheName = "test";
    final Cache<Object, Object> cache = new NullCache<>();
    // The provider must also implement VirtualHost so it can back a VirtualHostPrincipal.
    final CacheProvider virtualHost =
            mock(CacheProvider.class, withSettings().extraInterfaces(VirtualHost.class));
    when(virtualHost.getNamedCache(cacheName)).thenReturn(cache);

    // CacheFactory resolves the CacheProvider from the current Subject's principals,
    // so run the lookup under a Subject carrying the virtual-host principal.
    final Subject subject = new Subject();
    subject.getPrincipals().add(new VirtualHostPrincipal((VirtualHost<?>) virtualHost));
    subject.setReadOnly();
    final Cache<String, String> actualCache = Subject.doAs(subject,
            (PrivilegedAction<Cache<String, String>>) () -> CacheFactory.getCache(cacheName, null));

    // assertSame takes (expected, actual); the original call had the arguments swapped,
    // which yields misleading "expected X but was Y" messages on failure.
    assertSame(cache, actualCache);
    verify(virtualHost).getNamedCache(cacheName);
}
Use of com.google.common.cache.Cache in project pravega: class BaseMetadataStore, method writeToMetadataStore.
/**
 * Writes modified values to the metadata store.
 *
 * <p>For lazy writes the persist step is skipped entirely unless the transaction buffer
 * has exceeded {@code maxEntriesInTxnBuffer}. Pinned entries are always excluded from
 * persistence. Keys that are written out are marked persisted and refreshed in the cache.
 *
 * @param lazyWrite      whether this write may be deferred
 * @param modifiedValues the transaction data modified in the current transaction
 * @return a future that completes when all required writes have been persisted
 */
private CompletableFuture<Void> writeToMetadataStore(boolean lazyWrite, ArrayList<TransactionData> modifiedValues) {
    // Guard clause: lazy writes are deferred while the buffer is within its limit.
    // (Equivalent to the original "!lazyWrite || bufferCount.get() > maxEntriesInTxnBuffer"
    // write condition, by De Morgan.)
    if (lazyWrite && bufferCount.get() <= maxEntriesInTxnBuffer) {
        return CompletableFuture.completedFuture(null);
    }
    log.trace("Persisting all modified keys (except pinned)");
    // Pinned entries stay memory-resident and must never be written to the store.
    val toWriteList = modifiedValues.stream()
            .filter(entry -> !entry.isPinned())
            .collect(Collectors.toList());
    if (toWriteList.isEmpty()) {
        return CompletableFuture.completedFuture(null);
    }
    return writeAll(toWriteList).thenRunAsync(() -> {
        log.trace("Done persisting all modified keys");
        for (val writtenData : toWriteList) {
            // Mark written keys as persisted.
            writtenData.setPersisted(true);
            // Put it in cache.
            cache.put(writtenData.getKey(), writtenData);
        }
    }, executor);
}
Use of com.google.common.cache.Cache in project sofa-ark by Alipay: class BizClassLoaderTest, method getUrlResourceCache.
/**
 * Extracts the private {@code urlResourceCache} field from the given class loader via
 * reflection so the test can inspect its contents.
 *
 * @param classloader a class loader instance; assumed to extend
 *                    {@code AbstractClasspathClassLoader} — the field lookup fails otherwise
 * @return the loader's URL resource cache
 * @throws NoSuchFieldException   if the field has been renamed or removed
 * @throws IllegalAccessException if the field cannot be made accessible
 */
@SuppressWarnings("unchecked") // reflective read loses the field's generic type
private Cache<String, Optional<URL>> getUrlResourceCache(Object classloader) throws NoSuchFieldException, IllegalAccessException {
    Field field = AbstractClasspathClassLoader.class.getDeclaredField("urlResourceCache");
    field.setAccessible(true);
    return (Cache<String, Optional<URL>>) field.get(classloader);
}
Use of com.google.common.cache.Cache in project CAS by Apereo: class CasPersonDirectoryConfiguration, method composeMergedAndCachedAttributeRepositories.
/**
 * Builds the aggregate person-attribute DAO: merges results from the configured attribute
 * sources according to the configured merging strategy and fronts the merger with a
 * Guava-backed cache.
 *
 * @param list the individual attribute repository sources to merge together
 * @return a caching DAO that delegates to the merged sources
 */
private IPersonAttributeDao composeMergedAndCachedAttributeRepositories(final List<IPersonAttributeDao> list) {
    final MergingPersonAttributeDaoImpl mergingDao = new MergingPersonAttributeDaoImpl();
    // Trim the resolved value, not the literal default: the original code called
    // "replace".trim() (a no-op), so a configured merger with surrounding whitespace
    // would silently fall through to the default branch below.
    final String merger = StringUtils.defaultIfBlank(
            casProperties.getAuthn().getAttributeRepository().getMerger(), "replace").trim();
    LOGGER.debug("Configured merging strategy for attribute sources is [{}]", merger);
    switch (merger.toLowerCase()) {
        case "merge":
            mergingDao.setMerger(new MultivaluedAttributeMerger());
            break;
        case "add":
            mergingDao.setMerger(new NoncollidingAttributeAdder());
            break;
        case "replace":
        default:
            mergingDao.setMerger(new ReplacingAttributeAdder());
            break;
    }
    final CachingPersonAttributeDaoImpl impl = new CachingPersonAttributeDaoImpl();
    // Do not cache empty/null results so transient lookup failures are retried.
    impl.setCacheNullResults(false);
    // NOTE(review): raw Cache type retained deliberately — setUserInfoCache's expected
    // map signature is not visible here, and parameterizing the cache could break the
    // asMap() assignment; confirm against CachingPersonAttributeDaoImpl before typing it.
    final Cache graphs = CacheBuilder.newBuilder()
            .concurrencyLevel(2)
            .weakKeys()
            .maximumSize(casProperties.getAuthn().getAttributeRepository().getMaximumCacheSize())
            .expireAfterWrite(casProperties.getAuthn().getAttributeRepository().getExpireInMinutes(), TimeUnit.MINUTES)
            .build();
    impl.setUserInfoCache(graphs.asMap());
    mergingDao.setPersonAttributeDaos(list);
    impl.setCachedPersonAttributesDao(mergingDao);
    if (list.isEmpty()) {
        LOGGER.debug("No attribute repository sources are available/defined to merge together.");
    } else {
        LOGGER.debug("Configured attribute repository sources to merge together: [{}]", list);
        LOGGER.debug("Configured cache expiration policy for merging attribute sources to be [{}] minute(s)", casProperties.getAuthn().getAttributeRepository().getExpireInMinutes());
    }
    return impl;
}
Aggregations