Use of ddf.catalog.resource.data.ReliableResource in project ddf by codice: class ProductCacheDirListener, method entryRemoved.
@Override
public void entryRemoved(EntryEvent<K, V> event) {
V value = event.getValue();
if (value.getClass().isAssignableFrom(ReliableResource.class)) {
ReliableResource resource = (ReliableResource) value;
LOGGER.debug("entry removed event triggered: {}", resource.getKey());
if (manuallyEvictedEntries.contains(resource.getKey())) {
manuallyEvictedEntries.remove(resource.getKey());
} else {
cacheDirSize.addAndGet(-resource.getSize());
}
}
}
Use of ddf.catalog.resource.data.ReliableResource in project ddf by codice: class ResourceCacheImpl, method getValid.
/**
* @param key
* @return Resource, {@code null} if not found.
*/
@Override
public Resource getValid(String key, Metacard latestMetacard) {
LOGGER.trace("ENTERING: get()");
if (key == null) {
throw new IllegalArgumentException("Must specify non-null key");
}
if (latestMetacard == null) {
throw new IllegalArgumentException("Must specify non-null metacard");
}
LOGGER.debug("key {}", key);
ReliableResource cachedResource = (ReliableResource) cache.get(key);
// cache directory has had files deleted from it.
if (cachedResource != null) {
if (!validateCacheEntry(cachedResource, latestMetacard)) {
LOGGER.debug("Entry found in cache was out-of-date or otherwise invalid. Will need to be re-cached. Entry key: {} " + key);
return null;
}
if (cachedResource.hasProduct()) {
LOGGER.trace("EXITING: get() for key {}", key);
return cachedResource;
} else {
cache.remove(key);
LOGGER.debug("Entry found in the cache, but no product found in cache directory for key = {} " + key);
return null;
}
} else {
LOGGER.debug("No product found in cache for key = {}", key);
return null;
}
}
Use of ddf.catalog.resource.data.ReliableResource in project ddf by codice: class ResourceCacheImplSizeLimitTest, method testCacheDirMaxSizeManyEntries.
@Test
public void testCacheDirMaxSizeManyEntries() throws IOException, InterruptedException {
HazelcastInstance instance = initializeTestHazelcastInstance();
listener.setHazelcastInstance(instance);
listener.setMaxDirSizeBytes(10);
IMap<String, ReliableResource> cacheMap = instance.getMap(PRODUCT_CACHE_NAME);
//Simulate adding product to product cache
String rrKeyPrefix = "rr";
String rr1FileNameBase = "10bytes.txt";
int indexOfRemainingEntry = 11;
for (int i = 0; i < 11; i++) {
simulateAddFileToProductCache(rrKeyPrefix + i, rr1FileNameBase, i + rr1FileNameBase, cacheMap);
}
//not in loop in order to slightly delay this file being added to the cache so it is sorted correctly and not accidentally removed
simulateAddFileToProductCache(rrKeyPrefix + 11, rr1FileNameBase, 11 + rr1FileNameBase, cacheMap);
//entries from 0-10 should be removed from cache
for (int i = 0; i < 11; i++) {
verifyRemovedFromCache(cacheMap, rrKeyPrefix + 1, i + rr1FileNameBase);
}
verifyCached(cacheMap, rrKeyPrefix + indexOfRemainingEntry, indexOfRemainingEntry + rr1FileNameBase);
}
Use of ddf.catalog.resource.data.ReliableResource in project ddf by codice: class ResourceCacheImplSizeLimitTest, method testCacheDirMaxSize0.
/**
 * Verifies behavior with a maximum cache-directory size of 0: both products
 * remain cached after being added, i.e. a limit of 0 does not trigger
 * eviction (as asserted by this test).
 */
@Test
public void testCacheDirMaxSize0() throws IOException, InterruptedException {
    HazelcastInstance instance = initializeTestHazelcastInstance();
    listener.setMaxDirSizeBytes(0);
    listener.setHazelcastInstance(instance);
    IMap<String, ReliableResource> cacheMap = instance.getMap(PRODUCT_CACHE_NAME);

    // First product: simulate caching it and confirm it is retained.
    String firstKey = "rr1";
    String firstFileName = "10bytes.txt";
    simulateAddFileToProductCache(firstKey, firstFileName, firstFileName, cacheMap);
    verifyCached(cacheMap, firstKey, firstFileName);

    // Second product: adding more data still evicts nothing.
    String secondKey = "rr2";
    String secondFileName = "15bytes.txt";
    simulateAddFileToProductCache(secondKey, secondFileName, secondFileName, cacheMap);
    verifyCached(cacheMap, secondKey, secondFileName);
}
Use of ddf.catalog.resource.data.ReliableResource in project ddf by codice: class ResourceCacheImplSizeLimitTest, method testExceedCacheDirMaxSizeMultipleEvictions.
/**
 * Verifies that adding products whose combined size (10 + 15 + 15 bytes)
 * exceeds the configured 28-byte maximum evicts the earlier entries: the
 * first two products are removed and only the third remains cached.
 */
@Test
public void testExceedCacheDirMaxSizeMultipleEvictions() throws IOException, InterruptedException {
    HazelcastInstance instance = initializeTestHazelcastInstance();
    listener.setMaxDirSizeBytes(28);
    listener.setHazelcastInstance(instance);
    IMap<String, ReliableResource> cacheMap = instance.getMap(PRODUCT_CACHE_NAME);
    // Simulate adding a product to the product cache.
    String rr1Key = "rr1";
    String rr1FileName = "10bytes.txt";
    simulateAddFileToProductCache(rr1Key, rr1FileName, rr1FileName, cacheMap);
    // Ensure the entry has not been removed, since it doesn't exceed the max size.
    // Consistency fix: use the verifyCached helper like the sibling tests,
    // rather than a raw cacheMap.get + assertNotNull.
    verifyCached(cacheMap, rr1Key, rr1FileName);
    // Simulate adding a second product to the cache.
    String rr2Key = "rr2";
    String rr2FileName = "15bytes.txt";
    simulateAddFileToProductCache(rr2Key, rr2FileName, rr2FileName, cacheMap);
    // Simulate adding a third product, pushing the total past the 28-byte limit.
    String rr3Key = "rr3";
    String rr3FileName = "15bytes_B.txt";
    simulateAddFileToProductCache(rr3Key, rr3FileName, rr3FileName, cacheMap);
    verifyRemovedFromCache(cacheMap, rr1Key, rr1FileName);
    verifyRemovedFromCache(cacheMap, rr2Key, rr2FileName);
    verifyCached(cacheMap, rr3Key, rr3FileName);
}
Aggregations