Use of com.google.common.cache.Cache in project gerrit by GerritCodeReview.
The class CacheModule, method cache.
/**
 * Declares a named in-memory cache and registers its Guice bindings.
 *
 * <p>The cache is bound as an eager singleton under {@code Names.named(name)} and also
 * exported under the {@code ANY_CACHE} wildcard binding so it can be enumerated.
 *
 * @param name unique name identifying the cache binding.
 * @param keyType type of key used to look up entries.
 * @param valType type of value stored by the cache.
 * @param <K> type of key used to look up entries.
 * @param <V> type of value stored by the cache.
 * @return binding to describe the cache.
 */
protected <K, V> CacheBinding<K, V> cache(String name, TypeLiteral<K> keyType, TypeLiteral<V> valType) {
    Type cacheType = Types.newParameterizedType(Cache.class, keyType.getType(), valType.getType());
    @SuppressWarnings("unchecked")
    Key<Cache<K, V>> bindingKey = (Key<Cache<K, V>>) Key.get(cacheType, Names.named(name));
    CacheProvider<K, V> provider = new CacheProvider<>(this, name, keyType, valType);
    bind(bindingKey).toProvider(provider).asEagerSingleton();
    bind(ANY_CACHE).annotatedWith(Exports.named(name)).to(bindingKey);
    // Default weight limit; callers may override it on the returned binding.
    return provider.maximumWeight(1024);
}
Use of com.google.common.cache.Cache in project bookkeeper by apache.
The class TestFileInfoBackingCache, method testRefCountRace.
@Test
public void testRefCountRace() throws Exception {
    AtomicBoolean stop = new AtomicBoolean(false);
    // Backing cache whose loader maps a ledger id to a file under baseDir.
    FileInfoBackingCache cache = new FileInfoBackingCache((ledgerId, createIfNotFound) -> {
        File ledgerFile = new File(baseDir, String.valueOf(ledgerId));
        ledgerFile.deleteOnExit();
        return ledgerFile;
    });
    // Two concurrent workers repeatedly load and release the same ledger's file info,
    // recording every CachedFileInfo instance they ever observed.
    Iterable<Future<Set<CachedFileInfo>>> futures = IntStream.range(0, 2).mapToObj(workerId -> {
        Callable<Set<CachedFileInfo>> task = () -> {
            Set<CachedFileInfo> observed = new HashSet<>();
            while (!stop.get()) {
                CachedFileInfo fi = cache.loadFileInfo(1, masterKey);
                Assert.assertFalse(fi.isClosed());
                observed.add(fi);
                fi.release();
            }
            return observed;
        };
        return executor.submit(task);
    }).collect(Collectors.toList());
    // Let the workers race for a while, then signal them to stop.
    Thread.sleep(TimeUnit.SECONDS.toMillis(10));
    stop.set(true);
    // ensure all threads are finished operating on cache, before checking any
    for (Future<Set<CachedFileInfo>> future : futures) {
        future.get();
    }
    // After the final release, every observed instance must be closed with a dead ref count.
    for (Future<Set<CachedFileInfo>> future : futures) {
        for (CachedFileInfo fi : future.get()) {
            Assert.assertTrue(fi.isClosed());
            Assert.assertEquals(FileInfoBackingCache.DEAD_REF, fi.getRefCount());
        }
    }
}
Use of com.google.common.cache.Cache in project metasfresh-webui-api by metasfresh.
The class ADProcessInstancesRepository, method createNewProcessInstance0.
/**
 * Creates a new (webui) process instance: saves the process info, builds the parameters
 * document with its default values applied, and registers the resulting controller in the
 * internal {@code processInstances} cache.
 *
 * @param request describes the process to start (process id, single document path, view row ids selection)
 * @param evalCtx optional evaluation context used when creating the parameters document; may be {@code null}
 * @return the newly created process instance controller (the live instance; a read-only copy is cached)
 */
private IProcessInstanceController createNewProcessInstance0(@NonNull final CreateProcessInstanceRequest request, @Nullable final IDocumentEvaluatee evalCtx) {
//
// Save process info together with its parameters and get the newly created AD_PInstance_ID
final ProcessInfo processInfo = createProcessInfo(request);
Services.get(IADPInstanceDAO.class).saveProcessInfo(processInfo);
final DocumentId adPInstanceId = DocumentId.of(processInfo.getAD_PInstance_ID());
final Object processClassInstance = processInfo.newProcessClassInstanceOrNull();
// Temporarily make the process class instance the "current" JavaProcess while the
// parameters document is built; restored automatically when the try block exits.
try (final IAutoCloseable c = JavaProcess.temporaryChangeCurrentInstance(processClassInstance)) {
//
// Build the parameters document
final ProcessDescriptor processDescriptor = getProcessDescriptor(request.getProcessId());
final DocumentEntityDescriptor parametersDescriptor = processDescriptor.getParametersDescriptor();
final Document parametersDoc = ADProcessParametersRepository.instance.createNewParametersDocument(parametersDescriptor, adPInstanceId, evalCtx);
final int windowNo = parametersDoc.getWindowNo();
// Set the parameters' default values (the process class itself may provide defaults
// when it implements IProcessDefaultParametersProvider)
ProcessDefaultParametersUpdater.newInstance().addDefaultParametersProvider(processClassInstance instanceof IProcessDefaultParametersProvider ? (IProcessDefaultParametersProvider) processClassInstance : null).onDefaultValue((parameter, value) -> parametersDoc.processValueChange(parameter.getColumnName(), value, () -> "default parameter value")).updateDefaultValue(parametersDoc.getFieldViews(), field -> DocumentFieldAsProcessDefaultParameter.of(windowNo, field));
//
// Create (webui) process instance and add it to our internal cache.
final ADProcessInstanceController pinstance = ADProcessInstanceController.builder().caption(processDescriptor.getCaption()).instanceId(adPInstanceId).parameters(parametersDoc).processClassInstance(processClassInstance).contextSingleDocumentPath(request.getSingleDocumentPath()).viewId(request.getViewRowIdsSelection() != null ? request.getViewRowIdsSelection().getViewId() : null).build();
processInstances.put(adPInstanceId, pinstance.copyReadonly());
return pinstance;
}
}
Use of com.google.common.cache.Cache in project presto by prestodb.
The class AbstractTestParquetReader, method testCaching.
/**
 * Verifies that {@link CachingParquetMetadataSource} caches file metadata: the first read
 * misses, repeated reads hit, and invalidating the cache forces a fresh miss.
 */
@Test
public void testCaching() throws Exception {
    // Metadata cache: weight-bounded by metadata size, access-expiring, stats recording enabled.
    Cache<ParquetDataSourceId, ParquetFileMetadata> parquetFileMetadataCache = CacheBuilder.newBuilder()
            .maximumWeight(new DataSize(1, MEGABYTE).toBytes())
            .weigher((id, metadata) -> ((ParquetFileMetadata) metadata).getMetadataSize())
            .expireAfterAccess(new Duration(10, MINUTES).toMillis(), MILLISECONDS)
            .recordStats()
            .build();
    ParquetMetadataSource parquetMetadataSource = new CachingParquetMetadataSource(parquetFileMetadataCache, new MetadataReader());
    try (ParquetTester.TempFile tempFile = new ParquetTester.TempFile("test", "parquet")) {
        Iterable<Integer> values = intsBetween(0, 10);
        Iterator<?>[] readValues = stream(new Iterable<?>[] { values }).map(Iterable::iterator).toArray(size -> new Iterator<?>[size]);
        List<String> columnNames = singletonList("column1");
        List<Type> columnTypes = singletonList(INTEGER);
        writeParquetFileFromPresto(tempFile.getFile(), columnTypes, columnNames, readValues, 10, CompressionCodecName.GZIP);
        // First read populates the cache: one miss, no hits yet.
        testSingleRead(new Iterable<?>[] { values }, columnNames, columnTypes, parquetMetadataSource, tempFile.getFile());
        assertCacheStats(parquetFileMetadataCache, 1, 0);
        // Subsequent reads of the same file are served from the cache.
        testSingleRead(new Iterable<?>[] { values }, columnNames, columnTypes, parquetMetadataSource, tempFile.getFile());
        assertCacheStats(parquetFileMetadataCache, 1, 1);
        testSingleRead(new Iterable<?>[] { values }, columnNames, columnTypes, parquetMetadataSource, tempFile.getFile());
        assertCacheStats(parquetFileMetadataCache, 1, 2);
        // Invalidation forces the next read to miss again; the hit count is unchanged.
        parquetFileMetadataCache.invalidateAll();
        testSingleRead(new Iterable<?>[] { values }, columnNames, columnTypes, parquetMetadataSource, tempFile.getFile());
        assertCacheStats(parquetFileMetadataCache, 2, 2);
        testSingleRead(new Iterable<?>[] { values }, columnNames, columnTypes, parquetMetadataSource, tempFile.getFile());
        assertCacheStats(parquetFileMetadataCache, 2, 3);
    }
}

/** Asserts the cache's cumulative miss and hit counts recorded so far. */
private static void assertCacheStats(Cache<?, ?> cache, long expectedMissCount, long expectedHitCount) {
    assertEquals(cache.stats().missCount(), expectedMissCount);
    assertEquals(cache.stats().hitCount(), expectedHitCount);
}
Use of com.google.common.cache.Cache in project presto by prestodb.
The class StorageModule, method createOrcFileTailSource.
/**
 * Provides the {@link OrcFileTailSource} for the connector. Returns the plain storage-backed
 * source, wrapped with a weight-bounded caching layer (and an exported stats MBean) when the
 * file-tail cache is enabled in configuration.
 *
 * @param orcCacheConfig cache sizing, TTL and enablement settings.
 * @param exporter used to register the cache-stats MBean when caching is enabled.
 * @return the (possibly caching) file tail source.
 */
@Singleton
@Provides
public OrcFileTailSource createOrcFileTailSource(OrcCacheConfig orcCacheConfig, MBeanExporter exporter) {
    boolean dwrfStripeCacheEnabled = orcCacheConfig.isDwrfStripeCacheEnabled();
    int expectedFileTailSizeInBytes = toIntExact(orcCacheConfig.getExpectedFileTailSize().toBytes());
    OrcFileTailSource storageSource = new StorageOrcFileTailSource(expectedFileTailSizeInBytes, dwrfStripeCacheEnabled);
    if (!orcCacheConfig.isFileTailCacheEnabled()) {
        return storageSource;
    }
    // Weight-bounded, access-expiring cache of file tails keyed by data source id.
    Cache<OrcDataSourceId, OrcFileTail> fileTailCache = CacheBuilder.newBuilder()
            .maximumWeight(orcCacheConfig.getFileTailCacheSize().toBytes())
            .weigher((id, tail) -> ((OrcFileTail) tail).getTotalSize())
            .expireAfterAccess(orcCacheConfig.getFileTailCacheTtlSinceLastAccess().toMillis(), TimeUnit.MILLISECONDS)
            .recordStats()
            .build();
    exporter.export(generatedNameOf(CacheStatsMBean.class, connectorId + "_OrcFileTail"), new CacheStatsMBean(fileTailCache));
    return new CachingOrcFileTailSource(storageSource, fileTailCache);
}
Aggregations