Use of org.apache.flink.shaded.guava30.com.google.common.cache.Cache in the project cas by apereo:
the class CasPersonDirectoryConfiguration, method composeMergedAndCachedAttributeRepositories.
/**
 * Composes the given attribute repository sources behind a single merging DAO and
 * wraps the result in a Guava-cache-backed DAO so repeated person lookups do not
 * have to query every source each time.
 *
 * @param list attribute repository sources to merge; may be empty
 * @return a caching {@code IPersonAttributeDao} that delegates to the merged sources
 */
private IPersonAttributeDao composeMergedAndCachedAttributeRepositories(final List<IPersonAttributeDao> list) {
    final MergingPersonAttributeDaoImpl mergingDao = new MergingPersonAttributeDaoImpl();
    // Bug fix: trim() previously applied to the literal "replace" (a no-op) instead of
    // the configured value. Trim the resolved value so settings like " merge " match;
    // blank/absent configuration still falls back to "replace".
    final String merger = StringUtils.defaultIfBlank(casProperties.getAuthn().getAttributeRepository().getMerger(), "replace").trim();
    LOGGER.debug("Configured merging strategy for attribute sources is [{}]", merger);
    switch (merger.toLowerCase()) {
        case "merge":
            // Combine values of attributes that appear in multiple sources.
            mergingDao.setMerger(new MultivaluedAttributeMerger());
            break;
        case "add":
            // Only add attributes that do not already exist; never overwrite.
            mergingDao.setMerger(new NoncollidingAttributeAdder());
            break;
        case "replace":
        default:
            // Attributes from later sources overwrite earlier ones.
            mergingDao.setMerger(new ReplacingAttributeAdder());
            break;
    }
    final CachingPersonAttributeDaoImpl impl = new CachingPersonAttributeDaoImpl();
    impl.setCacheNullResults(false);
    // Raw Cache type retained to satisfy the Map-based setUserInfoCache contract below.
    final Cache graphs = CacheBuilder.newBuilder()
        .concurrencyLevel(2)
        .weakKeys()
        .maximumSize(casProperties.getAuthn().getAttributeRepository().getMaximumCacheSize())
        .expireAfterWrite(casProperties.getAuthn().getAttributeRepository().getExpireInMinutes(), TimeUnit.MINUTES)
        .build();
    impl.setUserInfoCache(graphs.asMap());
    mergingDao.setPersonAttributeDaos(list);
    impl.setCachedPersonAttributesDao(mergingDao);
    if (list.isEmpty()) {
        LOGGER.debug("No attribute repository sources are available/defined to merge together.");
    } else {
        LOGGER.debug("Configured attribute repository sources to merge together: [{}]", list);
        LOGGER.debug("Configured cache expiration policy for merging attribute sources to be [{}] minute(s)",
            casProperties.getAuthn().getAttributeRepository().getExpireInMinutes());
    }
    return impl;
}
Use of org.apache.flink.shaded.guava30.com.google.common.cache.Cache in the project gerrit by GerritCodeReview:
the class CachesCollection, method parse.
@Override
public CacheResource parse(ConfigResource parent, IdString id) throws AuthException, ResourceNotFoundException, PermissionBackendException {
    // Viewing cache details requires the VIEW_CACHES global capability.
    permissionBackend.user(self).check(GlobalPermission.VIEW_CACHES);
    // Cache ids take the form "<plugin>-<cache>"; a bare name belongs to core gerrit.
    String raw = id.get();
    String plugin = "gerrit";
    String cache = raw;
    int dash = raw.lastIndexOf('-');
    if (dash >= 0) {
        plugin = raw.substring(0, dash);
        cache = (dash + 1 < raw.length()) ? raw.substring(dash + 1) : "";
    }
    Provider<Cache<?, ?>> provider = cacheMap.byPlugin(plugin).get(cache);
    if (provider == null) {
        // Unknown plugin/cache combination.
        throw new ResourceNotFoundException(id);
    }
    return new CacheResource(plugin, cache, provider);
}
Use of org.apache.flink.shaded.guava30.com.google.common.cache.Cache in the project gerrit by GerritCodeReview:
the class NoteDbModule, method configure.
@Override
public void configure() {
    // Register assisted-inject factories for the NoteDb update/notes machinery.
    factory(ChangeUpdate.Factory.class);
    factory(ChangeDraftUpdate.Factory.class);
    factory(DeleteCommentRewriter.Factory.class);
    factory(DraftCommentNotes.Factory.class);
    factory(RobotCommentUpdate.Factory.class);
    factory(RobotCommentNotes.Factory.class);
    factory(NoteDbUpdateManager.Factory.class);
    if (!useTestBindings) {
        // Production wiring: real notes cache plus a rebuilder implementation
        // selected by the "noteDb.testRebuilderWrapper" config flag.
        install(ChangeNotesCache.module());
        if (cfg.getBoolean("noteDb", null, "testRebuilderWrapper", false)) {
            // Yes, another variety of test bindings with a different way of
            // configuring it.
            bind(ChangeRebuilder.class).to(TestChangeRebuilderWrapper.class);
        } else {
            bind(ChangeRebuilder.class).to(ChangeRebuilderImpl.class);
        }
    } else {
        // Test wiring: a no-op rebuilder stub whose every operation returns
        // null or does nothing, so tests never touch real rebuild logic.
        bind(ChangeRebuilder.class).toInstance(new ChangeRebuilder(null) {
            @Override
            public Result rebuild(ReviewDb db, Change.Id changeId) {
                return null;
            }
            @Override
            public Result rebuildEvenIfReadOnly(ReviewDb db, Id changeId) {
                return null;
            }
            @Override
            public Result rebuild(NoteDbUpdateManager manager, ChangeBundle bundle) {
                return null;
            }
            @Override
            public NoteDbUpdateManager stage(ReviewDb db, Change.Id changeId) {
                return null;
            }
            @Override
            public Result execute(ReviewDb db, Change.Id changeId, NoteDbUpdateManager manager) {
                return null;
            }
            @Override
            public void buildUpdates(NoteDbUpdateManager manager, ChangeBundle bundle) {
                // Do nothing.
            }
            @Override
            public void rebuildReviewDb(ReviewDb db, Project.NameKey project, Id changeId) {
                // Do nothing.
            }
        });
        // Back the named notes cache with a plain in-memory Guava cache
        // (no size/expiry policy) instead of installing ChangeNotesCache.module().
        bind(new TypeLiteral<Cache<ChangeNotesCache.Key, ChangeNotesState>>() {
        }).annotatedWith(Names.named(ChangeNotesCache.CACHE_NAME)).toInstance(CacheBuilder.newBuilder().<ChangeNotesCache.Key, ChangeNotesState>build());
    }
}
Use of org.apache.flink.shaded.guava30.com.google.common.cache.Cache in the project apollo by ctripcorp:
the class ConfigFileControllerTest, method testHandleMessage.
@Test
public void testHandleMessage() throws Exception {
    // Fixture keys/values used to populate the controller's internal state.
    String someWatchKey = "someWatchKey";
    String anotherWatchKey = "anotherWatchKey";
    String someCacheKey = "someCacheKey";
    String anotherCacheKey = "anotherCacheKey";
    String someValue = "someValue";

    // A release message whose message matches one of the watched keys.
    ReleaseMessage someReleaseMessage = mock(ReleaseMessage.class);
    when(someReleaseMessage.getMessage()).thenReturn(someWatchKey);

    // Reach into the controller's private local cache via reflection and seed it.
    @SuppressWarnings("unchecked")
    Cache<String, String> localCache =
        (Cache<String, String>) ReflectionTestUtils.getField(configFileController, "localCache");
    localCache.put(someCacheKey, someValue);
    localCache.put(anotherCacheKey, someValue);

    // Wire both watch keys to both cache keys, and the reverse mapping.
    watchedKeys2CacheKey.putAll(someWatchKey, Lists.newArrayList(someCacheKey, anotherCacheKey));
    watchedKeys2CacheKey.putAll(anotherWatchKey, Lists.newArrayList(someCacheKey, anotherCacheKey));
    cacheKey2WatchedKeys.putAll(someCacheKey, Lists.newArrayList(someWatchKey, anotherWatchKey));
    cacheKey2WatchedKeys.putAll(anotherCacheKey, Lists.newArrayList(someWatchKey, anotherWatchKey));

    configFileController.handleMessage(someReleaseMessage, Topics.APOLLO_RELEASE_TOPIC);

    // Handling the release message must invalidate every affected entry,
    // leaving both mappings empty.
    assertTrue(watchedKeys2CacheKey.isEmpty());
    assertTrue(cacheKey2WatchedKeys.isEmpty());
}
Use of org.apache.flink.shaded.guava30.com.google.common.cache.Cache in the project bookkeeper by apache:
the class TestFileInfoBackingCache, method testForDeadlocks.
/**
 * Of course this can't prove they don't exist, but
 * try to shake them out none the less.
 */
@Test
public void testForDeadlocks() throws Exception {
    // 20 concurrent workers hammering a cache keyed by only 10 ledger ids,
    // to maximize contention on the same entries.
    int numRunners = 20;
    int maxLedgerId = 10;
    AtomicBoolean done = new AtomicBoolean(false);
    // Loader creates a per-ledger file on demand; deleteOnExit keeps the
    // test directory clean after the JVM exits.
    FileInfoBackingCache cache = new FileInfoBackingCache((ledgerId, createIfNotFound) -> {
        File f = new File(baseDir, String.valueOf(ledgerId));
        f.deleteOnExit();
        return f;
    });
    // Each worker randomly acquires references (up to 5 held at once) or
    // releases one, until signalled to stop; it returns every file info it
    // ever touched so closure can be verified afterwards.
    Iterable<Future<Set<CachedFileInfo>>> futures = IntStream.range(0, numRunners).mapToObj((i) -> {
        Callable<Set<CachedFileInfo>> c = () -> {
            Random r = new Random();
            List<CachedFileInfo> fileInfos = new ArrayList<>();
            Set<CachedFileInfo> allFileInfos = new HashSet<>();
            while (!done.get()) {
                if (r.nextBoolean() && fileInfos.size() < 5) {
                    // take a reference
                    CachedFileInfo fi = cache.loadFileInfo(r.nextInt(maxLedgerId), masterKey);
                    Assert.assertFalse(fi.isClosed());
                    allFileInfos.add(fi);
                    fileInfos.add(fi);
                } else {
                    // release a reference
                    Collections.shuffle(fileInfos);
                    if (!fileInfos.isEmpty()) {
                        fileInfos.remove(0).release();
                    }
                }
            }
            // Drain any references still held when the stop flag flipped.
            for (CachedFileInfo fi : fileInfos) {
                Assert.assertFalse(fi.isClosed());
                fi.release();
            }
            return allFileInfos;
        };
        return executor.submit(c);
    }).collect(Collectors.toList());
    // Let the workers churn for 10 seconds, then signal them to stop.
    Thread.sleep(TimeUnit.SECONDS.toMillis(10));
    done.set(true);
    // ensure all threads are finished operating on cache, before checking any
    for (Future<Set<CachedFileInfo>> f : futures) {
        f.get();
    }
    // Every reference ever handed out must now be closed and fully released.
    for (Future<Set<CachedFileInfo>> f : futures) {
        for (CachedFileInfo fi : f.get()) {
            Assert.assertTrue(fi.isClosed());
            Assert.assertEquals(FileInfoBackingCache.DEAD_REF, fi.getRefCount());
        }
    }
    // They should be loaded fresh (i.e. this load should be only reference)
    for (int i = 0; i < maxLedgerId; i++) {
        Assert.assertEquals(1, cache.loadFileInfo(i, masterKey).getRefCount());
    }
}
Aggregations