Use of org.apache.solr.util.ConcurrentLRUCache in project lucene-solr by apache.
Class TestFastLRUCache, method doPerfTest:
void doPerfTest(int iter, int cacheSize, int maxKey) {
  final RTimer timer = new RTimer();

  int lowerWaterMark = cacheSize;
  int upperWaterMark = (int) (lowerWaterMark * 1.1);

  Random r = random();
  ConcurrentLRUCache cache = new ConcurrentLRUCache(upperWaterMark, lowerWaterMark,
      (upperWaterMark + lowerWaterMark) / 2, upperWaterMark, false, false, null);
  boolean getSize = false;
  int minSize = 0, maxSize = 0;
  for (int i = 0; i < iter; i++) {
    cache.put(r.nextInt(maxKey), "TheValue");
    int sz = cache.size();
    if (!getSize && sz >= cacheSize) {
      getSize = true;
      minSize = sz;
    } else {
      if (sz < minSize) minSize = sz;
      else if (sz > maxSize) maxSize = sz;
    }
  }
  cache.destroy();

  System.out.println("time=" + timer.getTime() + ", minSize=" + minSize + ",maxSize=" + maxSize);
}
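The seven constructor arguments above are, reading off how the test computes them, the upper and lower watermarks, an acceptable size between the two, the initial size of the backing map, two flags controlling whether cleanup runs on a background thread or on a freshly spawned thread, and an eviction listener (null here). A minimal standalone sketch of the same calls with generic types added; the parameter roles are inferred from this snippet rather than taken from the class documentation, and the concrete numbers are illustrative only:

    // Sketch, not the test itself: argument roles inferred from the snippet above.
    ConcurrentLRUCache<Integer, String> cache = new ConcurrentLRUCache<>(
        1100,   // upper watermark: cleanup starts once size grows past this (inferred)
        1000,   // lower watermark: cleanup drains the cache back toward this (inferred)
        1050,   // acceptable size between the two watermarks
        1100,   // initial size of the backing map
        false,  // run a background cleanup thread
        false,  // spawn a new thread for each cleanup
        null);  // eviction listener (none)
    cache.put(42, "TheValue");
    String v = cache.get(42);
    int sz = cache.size();
    cache.destroy();  // release cache resources when finished

When maxKey is larger than the cache, the put loop keeps pushing the size past the upper watermark, so size() oscillates between the watermarks as entries are evicted; minSize and maxSize record how far that oscillation strays once the cache has first filled up.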
Use of org.apache.solr.util.ConcurrentLRUCache in project lucene-solr by apache.
Class TestJavaBinCodec, method doDecodePerf:
public static void doDecodePerf(String[] args) throws Exception {
  int arg = 0;
  int nThreads = Integer.parseInt(args[arg++]);
  int nBuffers = Integer.parseInt(args[arg++]);
  final long iter = Long.parseLong(args[arg++]);
  int cacheSz = Integer.parseInt(args[arg++]);

  Random r = new Random(0);
  final byte[][] buffers = new byte[nBuffers][];
  for (int bufnum = 0; bufnum < nBuffers; bufnum++) {
    SolrDocument sdoc = new SolrDocument();
    sdoc.put("id", "my_id_" + bufnum);
    sdoc.put("author", str(r, 10 + r.nextInt(10)));
    sdoc.put("address", str(r, 20 + r.nextInt(20)));
    sdoc.put("license", str(r, 10));
    sdoc.put("title", str(r, 5 + r.nextInt(10)));
    sdoc.put("modified_dt", r.nextInt(1000000));
    sdoc.put("creation_dt", r.nextInt(1000000));
    sdoc.put("birthdate_dt", r.nextInt(1000000));
    sdoc.put("clean", r.nextBoolean());
    sdoc.put("dirty", r.nextBoolean());
    sdoc.put("employed", r.nextBoolean());
    sdoc.put("priority", r.nextInt(100));
    sdoc.put("dependents", r.nextInt(6));
    sdoc.put("level", r.nextInt(101));
    sdoc.put("education_level", r.nextInt(10));
    // higher level of reuse for string values
    sdoc.put("state", "S" + r.nextInt(50));
    sdoc.put("country", "Country" + r.nextInt(20));
    sdoc.put("some_boolean", "" + r.nextBoolean());
    sdoc.put("another_boolean", "" + r.nextBoolean());
    buffers[bufnum] = getBytes(sdoc);
  }

  int ret = 0;
  final RTimer timer = new RTimer();
  // the cache in the first version of the patch was 10000,9000,10000,1000,false,true,null
  ConcurrentLRUCache underlyingCache = cacheSz > 0
      ? new ConcurrentLRUCache<>(cacheSz, cacheSz - cacheSz / 10, cacheSz, cacheSz / 10, false, true, null)
      : null;
  final JavaBinCodec.StringCache stringCache =
      underlyingCache == null ? null : new JavaBinCodec.StringCache(underlyingCache);
  if (nThreads <= 0) {
    ret += doDecode(buffers, iter, stringCache);
  } else {
    runInThreads(nThreads, () -> {
      try {
        doDecode(buffers, iter, stringCache);
      } catch (IOException e) {
        e.printStackTrace();
      }
    });
  }

  long n = iter * Math.max(1, nThreads);
  System.out.println("ret=" + ret + " THROUGHPUT=" + (n * 1000 / timer.getTime()));
  if (underlyingCache != null)
    System.out.println("cache: hits=" + underlyingCache.getStats().getCumulativeHits()
        + " lookups=" + underlyingCache.getStats().getCumulativeLookups()
        + " size=" + underlyingCache.getStats().getCurrentSize());
}
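doDecode, runInThreads, str, and getBytes are helpers defined elsewhere in TestJavaBinCodec and are not shown here. The point of the ConcurrentLRUCache in this test is that JavaBinCodec.StringCache wraps it so repeated field names and values decode to shared String instances instead of fresh allocations. A minimal sketch of what the decode loop might look like, assuming JavaBinCodec offers a constructor taking an ObjectResolver and a StringCache and that unmarshal accepts a plain InputStream; the real helper may be structured differently:

    // Sketch only; the actual doDecode in TestJavaBinCodec may differ.
    static int doDecode(byte[][] buffers, long iter, JavaBinCodec.StringCache stringCache) throws IOException {
      int ret = 0;
      int bufnum = -1;
      while (--iter >= 0) {
        if (++bufnum >= buffers.length) bufnum = 0;                // cycle through the encoded documents
        JavaBinCodec codec = new JavaBinCodec(null, stringCache);  // no resolver, shared string cache
        Object o = codec.unmarshal(new java.io.ByteArrayInputStream(buffers[bufnum]));
        if (o instanceof SolrDocument) {
          ret += ((SolrDocument) o).size();                        // consume the result so decoding isn't optimized away
        }
      }
      return ret;
    }

The cache statistics printed at the end (cumulative hits versus lookups) indicate how often a decoded string was served from the shared cache rather than re-created.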