use of org.apache.flink.shaded.guava30.com.google.common.cache.RemovalListener in project pinot by linkedin.
the class ThirdEyeCacheRegistry method initCaches.
private static void initCaches(ThirdEyeConfiguration config) {
    ThirdEyeCacheRegistry cacheRegistry = ThirdEyeCacheRegistry.getInstance();
    RemovalListener<PinotQuery, ResultSetGroup> listener = new RemovalListener<PinotQuery, ResultSetGroup>() {
        @Override
        public void onRemoval(RemovalNotification<PinotQuery, ResultSetGroup> notification) {
            LOGGER.info("Expired {}", notification.getKey().getPql());
        }
    };
    // ResultSetGroup Cache. The size of this cache is limited by the total number of buckets in all ResultSetGroup.
    // We estimate that 1 bucket (including overhead) consumes 1KB and this cache is allowed to use up to 50% of max
    // heap space.
    long maxBucketNumber = getApproximateMaxBucketNumber(DEFAULT_HEAP_PERCENTAGE_FOR_RESULTSETGROUP_CACHE);
    LoadingCache<PinotQuery, ResultSetGroup> resultSetGroupCache = CacheBuilder.newBuilder()
        .removalListener(listener)
        .expireAfterAccess(1, TimeUnit.HOURS)
        .maximumWeight(maxBucketNumber)
        .weigher((pinotQuery, resultSetGroup) -> {
            int resultSetCount = resultSetGroup.getResultSetCount();
            int weight = 0;
            for (int idx = 0; idx < resultSetCount; ++idx) {
                com.linkedin.pinot.client.ResultSet resultSet = resultSetGroup.getResultSet(idx);
                weight += (resultSet.getColumnCount() * resultSet.getRowCount());
            }
            return weight;
        })
        .build(new ResultSetGroupCacheLoader(pinotThirdeyeClientConfig));
    cacheRegistry.registerResultSetGroupCache(resultSetGroupCache);
    LOGGER.info("Max bucket number for ResultSetGroup cache is set to {}", maxBucketNumber);
    // CollectionMaxDataTime Cache
    LoadingCache<String, Long> collectionMaxDataTimeCache = CacheBuilder.newBuilder()
        .refreshAfterWrite(5, TimeUnit.MINUTES)
        .build(new CollectionMaxDataTimeCacheLoader(resultSetGroupCache, datasetConfigDAO));
    cacheRegistry.registerCollectionMaxDataTimeCache(collectionMaxDataTimeCache);
    // Query Cache
    QueryCache queryCache = new QueryCache(thirdEyeClient, Executors.newFixedThreadPool(10));
    cacheRegistry.registerQueryCache(queryCache);
    // Dimension Filter cache
    LoadingCache<String, String> dimensionFiltersCache = CacheBuilder.newBuilder()
        .build(new DimensionFiltersCacheLoader(cacheRegistry.getQueryCache()));
    cacheRegistry.registerDimensionFiltersCache(dimensionFiltersCache);
    // Dashboards cache
    LoadingCache<String, String> dashboardsCache = CacheBuilder.newBuilder()
        .build(new DashboardsCacheLoader(dashboardConfigDAO));
    cacheRegistry.registerDashboardsCache(dashboardsCache);
    // Collections cache
    CollectionsCache collectionsCache = new CollectionsCache(datasetConfigDAO, config);
    cacheRegistry.registerCollectionsCache(collectionsCache);
    // DatasetConfig cache
    LoadingCache<String, DatasetConfigDTO> datasetConfigCache = CacheBuilder.newBuilder()
        .build(new DatasetConfigCacheLoader(datasetConfigDAO));
    cacheRegistry.registerDatasetConfigCache(datasetConfigCache);
    // MetricConfig cache
    LoadingCache<MetricDataset, MetricConfigDTO> metricConfigCache = CacheBuilder.newBuilder()
        .build(new MetricConfigCacheLoader(metricConfigDAO));
    cacheRegistry.registerMetricConfigCache(metricConfigCache);
    // DashboardConfigs cache
    LoadingCache<String, List<DashboardConfigDTO>> dashboardConfigsCache = CacheBuilder.newBuilder()
        .build(new DashboardConfigCacheLoader(dashboardConfigDAO));
    cacheRegistry.registerDashboardConfigsCache(dashboardConfigsCache);
}
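The ResultSetGroup cache above is bounded by a computed weight rather than an entry count: maximumWeight() only takes effect in combination with a weigher(), and the removal listener merely logs expired queries. Below is a minimal, self-contained sketch of that weigher-plus-maximumWeight pattern against plain Guava; the String-to-byte[] cache, the 50,000-byte budget, and the loader are hypothetical stand-ins, not part of the ThirdEye code.

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.Weigher;

public class WeightedCacheSketch {

    public static void main(String[] args) throws Exception {
        // Log evictions, analogous to the PinotQuery removal listener above.
        RemovalListener<String, byte[]> listener =
            notification -> System.out.println("Expired " + notification.getKey());

        // Weigh each entry by its payload size instead of counting entries.
        Weigher<String, byte[]> weigher = (key, value) -> value.length;

        LoadingCache<String, byte[]> cache = CacheBuilder.newBuilder()
            .removalListener(listener)
            .expireAfterAccess(1, TimeUnit.HOURS)
            .maximumWeight(50_000) // hypothetical budget: ~50 KB of payload
            .weigher(weigher)
            .build(new CacheLoader<String, byte[]>() {
                @Override
                public byte[] load(String key) {
                    return key.getBytes(); // stand-in for executing a real query
                }
            });

        System.out.println(cache.get("select * from hypotheticalTable").length);
    }
}

Note that Guava measures an entry's weight once, when it is inserted, so the weigher should depend only on properties of the value that do not change afterwards.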
use of org.apache.flink.shaded.guava30.com.google.common.cache.RemovalListener in project airavata by apache.
the class Factory method loadConfiguration.
public static void loadConfiguration() throws GFacException {
    GFacYamlConfigruation config = new GFacYamlConfigruation();
    try {
        for (JobSubmitterTaskConfig jobSubmitterTaskConfig : config.getJobSbumitters()) {
            String taskClass = jobSubmitterTaskConfig.getTaskClass();
            Class<?> aClass = Class.forName(taskClass);
            Constructor<?> constructor = aClass.getConstructor();
            JobSubmissionTask task = (JobSubmissionTask) constructor.newInstance();
            task.init(jobSubmitterTaskConfig.getProperties());
            jobSubmissionTask.put(jobSubmitterTaskConfig.getSubmissionProtocol(), task);
        }
        for (DataTransferTaskConfig dataTransferTaskConfig : config.getFileTransferTasks()) {
            String taskClass = dataTransferTaskConfig.getTaskClass();
            Class<?> aClass = Class.forName(taskClass);
            Constructor<?> constructor = aClass.getConstructor();
            Task task = (Task) constructor.newInstance();
            task.init(dataTransferTaskConfig.getProperties());
            dataMovementTask.put(dataTransferTaskConfig.getTransferProtocol(), task);
        }
        for (ResourceConfig resourceConfig : config.getResourceConfiguration()) {
            resources.put(resourceConfig.getJobManagerType(), resourceConfig);
        }
    } catch (Exception e) {
        throw new GFacException("Gfac config issue", e);
    }
    sessionCache = CacheBuilder.newBuilder()
        .expireAfterAccess(ServerSettings.getSessionCacheAccessTimeout(), TimeUnit.MINUTES)
        .removalListener((RemovalListener<String, Session>) removalNotification -> {
            if (removalNotification.getValue().isConnected()) {
                log.info("Disconnecting ssh session with key: " + removalNotification.getKey());
                removalNotification.getValue().disconnect();
            }
            log.info("Removed ssh session with key: " + removalNotification.getKey());
        })
        .build();
}
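The interesting part of this snippet is that the removal listener doubles as a resource-release hook: when an SSH session expires from the cache, it is disconnected. The sketch below reproduces that idea in a standalone form, using a hypothetical FakeSession in place of the SSH Session type; the 30-minute timeout is likewise illustrative.

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;

public class SessionCacheSketch {

    /** Stand-in for a connection/session object such as the SSH Session above. */
    static class FakeSession {
        private boolean connected = true;
        boolean isConnected() { return connected; }
        void disconnect() { connected = false; }
    }

    public static void main(String[] args) {
        RemovalListener<String, FakeSession> listener = notification -> {
            // Release the underlying resource when the entry is evicted or expired.
            FakeSession session = notification.getValue();
            if (session != null && session.isConnected()) {
                session.disconnect();
            }
            System.out.println("Removed session with key: " + notification.getKey());
        };

        Cache<String, FakeSession> sessionCache = CacheBuilder.newBuilder()
            .expireAfterAccess(30, TimeUnit.MINUTES)
            .removalListener(listener)
            .build();

        sessionCache.put("host-1", new FakeSession());
        sessionCache.invalidate("host-1"); // triggers the removal listener
    }
}

Removal listeners run on the thread performing cache maintenance, so if teardown is slow it may be worth wrapping the listener with RemovalListeners.asynchronous(listener, executor).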
use of org.apache.flink.shaded.guava30.com.google.common.cache.RemovalListener in project EventHub by Codecademy.
the class UserEventIndexModule method getBlockFactory.
@Provides
public UserEventIndex.Block.Factory getBlockFactory(
    @Named("eventhub.usereventindex.directory") final String directory,
    @Named("eventhub.usereventindex.blockCacheSize") int blockCacheSize,
    @Named("eventhub.usereventindex.numRecordsPerBlock") int numRecordsPerBlock,
    @Named("eventhub.usereventindex.numBlocksPerFile") int numBlocksPerFile) {
    final int fileSize = numBlocksPerFile * (numRecordsPerBlock * UserEventIndex.ID_SIZE + UserEventIndex.Block.MetaData.SIZE);
    LoadingCache<Integer, MappedByteBuffer> buffers = CacheBuilder.newBuilder()
        .maximumSize(blockCacheSize)
        .recordStats()
        .removalListener(new RemovalListener<Integer, MappedByteBuffer>() {
            @Override
            public void onRemoval(RemovalNotification<Integer, MappedByteBuffer> notification) {
                MappedByteBuffer value = notification.getValue();
                if (value != null) {
                    value.force();
                }
            }
        })
        .build(new CacheLoader<Integer, MappedByteBuffer>() {
            @Override
            public MappedByteBuffer load(Integer key) throws Exception {
                return ByteBufferUtil.createNewBuffer(String.format("%s/block_%d.mem", directory, key), fileSize);
            }
        });
    String filename = directory + "block_factory.ser";
    File file = new File(filename);
    if (file.exists()) {
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file))) {
            long currentPointer = ois.readLong();
            return new UserEventIndex.Block.Factory(filename, buffers, numRecordsPerBlock, numBlocksPerFile, currentPointer);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    return new UserEventIndex.Block.Factory(filename, buffers, numRecordsPerBlock, numBlocksPerFile, 0);
}
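Here the removal listener flushes a memory-mapped buffer (MappedByteBuffer.force()) when a block falls out of the size-bounded cache, so dirty pages are persisted before the mapping becomes unreachable. The sketch below reproduces that evict-and-flush pattern using plain JDK file mapping instead of EventHub's ByteBufferUtil; the temp-directory path, 4 KB file size, and cache size of 16 are illustrative assumptions.

import java.io.File;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;

public class MappedBufferCacheSketch {

    public static void main(String[] args) throws Exception {
        final String directory = System.getProperty("java.io.tmpdir"); // illustrative location
        final int fileSize = 4096;                                     // illustrative block size

        LoadingCache<Integer, MappedByteBuffer> buffers = CacheBuilder.newBuilder()
            .maximumSize(16)
            .recordStats()
            // Flush dirty pages to disk when a buffer falls out of the cache.
            .removalListener((RemovalListener<Integer, MappedByteBuffer>) notification -> {
                MappedByteBuffer value = notification.getValue();
                if (value != null) {
                    value.force();
                }
            })
            .build(new CacheLoader<Integer, MappedByteBuffer>() {
                @Override
                public MappedByteBuffer load(Integer key) throws Exception {
                    File file = new File(String.format("%s/block_%d.mem", directory, key));
                    try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) {
                        return raf.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, fileSize);
                    }
                }
            });

        buffers.get(0).putLong(0, 42L);
        buffers.invalidateAll(); // forces the buffer to disk via the removal listener
    }
}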
use of org.apache.flink.shaded.guava30.com.google.common.cache.RemovalListener in project EventHub by Codecademy.
the class DmaList method build.
public static <T> DmaList<T> build(final Schema<T> schema, final String directory, final int numRecordsPerFile, int cacheSize) {
    // noinspection ResultOfMethodCallIgnored
    new File(directory).mkdirs();
    try (RandomAccessFile raf = new RandomAccessFile(new File(String.format("%s/meta_data.mem", directory)), "rw")) {
        MappedByteBuffer metaDataBuffer = raf.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, 8);
        long numRecords = metaDataBuffer.getLong();
        final int fileSize = numRecordsPerFile * schema.getObjectSize();
        LoadingCache<Integer, MappedByteBuffer> buffers = CacheBuilder.newBuilder()
            .maximumSize(cacheSize)
            .recordStats()
            .removalListener(new RemovalListener<Integer, MappedByteBuffer>() {
                @Override
                public void onRemoval(RemovalNotification<Integer, MappedByteBuffer> notification) {
                    MappedByteBuffer value = notification.getValue();
                    if (value != null) {
                        value.force();
                    }
                }
            })
            .build(new CacheLoader<Integer, MappedByteBuffer>() {
                @Override
                public MappedByteBuffer load(Integer key) throws Exception {
                    return ByteBufferUtil.createNewBuffer(String.format("%s/dma_list_%d.mem", directory, key), fileSize);
                }
            });
        return new DmaList<>(directory, schema, metaDataBuffer, buffers, numRecords, numRecordsPerFile);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
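This snippet follows the same evict-and-flush pattern as the block factory above. Both EventHub caches also call recordStats(), which is what makes Cache.stats() return meaningful numbers for monitoring hit rates and evictions. A small, self-contained sketch of reading those statistics (the toy loader and sizes are assumptions):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.CacheStats;
import com.google.common.cache.LoadingCache;

public class CacheStatsSketch {

    public static void main(String[] args) throws Exception {
        LoadingCache<Integer, String> cache = CacheBuilder.newBuilder()
            .maximumSize(2)
            .recordStats() // without recordStats(), stats() reports all zeros
            .build(new CacheLoader<Integer, String>() {
                @Override
                public String load(Integer key) {
                    return "value-" + key;
                }
            });

        cache.get(1);
        cache.get(1); // hit
        cache.get(2);
        cache.get(3); // typically evicts an older entry (maximumSize = 2)

        CacheStats stats = cache.stats();
        System.out.printf("hitRate=%.2f evictions=%d loads=%d%n",
            stats.hitRate(), stats.evictionCount(), stats.loadCount());
    }
}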
use of org.apache.flink.shaded.guava30.com.google.common.cache.RemovalListener in project kylo by Teradata.
the class JobTrackerService method createCache.
/**
* Creates a job cache.
*/
@Nonnull
private Cache<String, Job<?>> createCache() {
    final Cache<String, Job<?>> cache = CacheBuilder.newBuilder()
        .expireAfterWrite(1, TimeUnit.HOURS)
        .removalListener(new RemovalListener<String, Job<?>>() {
            @Override
            public void onRemoval(@Nonnull final RemovalNotification<String, Job<?>> notification) {
                // noinspection ConstantConditions
                final Optional<Integer> jobId = notification.getValue().getJobId();
                if (jobId.isPresent()) {
                    jobs.remove(jobId.get());
                }
                for (final StageInfo stage : notification.getValue().getStages()) {
                    stages.remove(stage.stageId());
                }
            }
        })
        .build();
    // Schedule clean-up of groups
    executor.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            groups.cleanUp();
        }
    }, 1, 1, TimeUnit.HOURS);
    return cache;
}
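Two details in this snippet are worth noting: the removal listener tears down the related bookkeeping (jobs and stages) when an entry expires, and the scheduled task exists because Guava caches perform expiry maintenance only during cache activity, so an otherwise idle cache needs an explicit cleanUp() nudge. A minimal sketch of that combination, with illustrative types and intervals:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;

public class ExpiringCacheSketch {

    public static void main(String[] args) {
        Cache<String, String> cache = CacheBuilder.newBuilder()
            .expireAfterWrite(1, TimeUnit.HOURS)
            // Clean up any state tied to the entry when it expires or is invalidated.
            .removalListener((RemovalListener<String, String>) notification ->
                System.out.println("Dropped " + notification.getKey()))
            .build();

        cache.put("job-1", "running");

        // Guava has no background threads of its own; expired entries are only
        // reclaimed during reads/writes, so a periodic cleanUp() keeps an idle
        // cache tidy, mirroring the scheduled groups.cleanUp() above.
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        executor.scheduleAtFixedRate(cache::cleanUp, 1, 1, TimeUnit.HOURS);
    }
}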