
Example 11 with MetricsMap

Use of org.apache.solr.metrics.MetricsMap in project lucene-solr by apache.

Source: class ExitableDirectoryReaderTest, method testCacheAssumptions.

// This method makes a lot of assumptions about how/when cache entries should change. The
// simple case above shows the root problem without the confusion. Once this test is running,
// testFilterSimpleCase should be removed, this test should be un-ignored, and the assumptions
// verified. Given all the weirdness, I'm not going to vouch for this test. Feel free to change it.
@Test
public void testCacheAssumptions() throws Exception {
    String fq = "name:d*";
    SolrCore core = h.getCore();
    MetricsMap filterCacheStats = (MetricsMap) core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache");
    long fqInserts = (long) filterCacheStats.getValue().get("inserts");
    MetricsMap queryCacheStats = (MetricsMap) core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
    long qrInserts = (long) queryCacheStats.getValue().get("inserts");
    // This gets 0 docs back. With timeAllowed=10000 instead of 1 it gets 100 back, and the for loop
    // below succeeds.
    String response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep));
    Map res = (Map) ObjectBuilder.fromJSON(response);
    Map body = (Map) (res.get("response"));
    assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS);
    Map header = (Map) (res.get("responseHeader"));
    assertTrue("Should have partial results", (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)));
    assertEquals("Should NOT have inserted partial results in the cache!", (long) queryCacheStats.getValue().get("inserts"), qrInserts);
    assertEquals("Should NOT have another insert", fqInserts, (long) filterCacheStats.getValue().get("inserts"));
    // At the end of all this, we should have no hits in the queryResultCache.
    response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout));
    // Check that we did insert this one.
    assertEquals("Hits should still be 0", (long) filterCacheStats.getValue().get("hits"), 0L);
    assertEquals("Inserts should be bumped", (long) filterCacheStats.getValue().get("inserts"), fqInserts + 1);
    res = (Map) ObjectBuilder.fromJSON(response);
    body = (Map) (res.get("response"));
    assertEquals("Should have exactly " + NUM_DOCS, (long) (body.get("numFound")), NUM_DOCS);
    header = (Map) (res.get("responseHeader"));
    assertTrue("Should NOT have partial results", header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY) == null);
}
Also used : MetricsMap(org.apache.solr.metrics.MetricsMap) Map(java.util.Map) Test(org.junit.Test)
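The test above relies on class-level fixtures that this snippet does not show: NUM_DOCS, sleep, and longTimeout, plus documents indexed during class setup. A minimal sketch of what that setup might look like as a fragment of the test class; the field names match the test, but the concrete values, config/schema file names, and document contents are assumptions for illustration only.

// Hypothetical values; the real ones live elsewhere in ExitableDirectoryReaderTest.
static final int NUM_DOCS = 100;
// Passed as the "sleep" request parameter so a delaying search component can outlast timeAllowed.
static final String sleep = "2";
// A timeAllowed value generous enough that the query always completes.
static final String longTimeout = "10000";

@BeforeClass
public static void setUpClass() throws Exception {
    // Assumed config/schema names; the real test uses a config wired for ExitableDirectoryReader.
    initCore("solrconfig.xml", "schema.xml");
    for (int i = 0; i < NUM_DOCS; i++) {
        // "name" values starting with 'd' exist so that the fq name:d* matches documents.
        assertU(adoc("id", Integer.toString(i), "name", "a" + i + " d" + i));
    }
    assertU(commit());
}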

Example 12 with MetricsMap

Use of org.apache.solr.metrics.MetricsMap in project lucene-solr by apache.

Source: class HdfsDirectoryFactoryTest, method testLocalityReporter.

@Test
public void testLocalityReporter() throws Exception {
    Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
    conf.set("dfs.permissions.enabled", "false");
    Random r = random();
    HdfsDirectoryFactory factory = new HdfsDirectoryFactory();
    SolrMetricManager metricManager = new SolrMetricManager();
    String registry = TestUtil.randomSimpleString(r, 2, 10);
    String scope = TestUtil.randomSimpleString(r, 2, 10);
    Map<String, String> props = new HashMap<String, String>();
    props.put(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr");
    props.put(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "false");
    props.put(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_ENABLE, "false");
    props.put(HdfsDirectoryFactory.LOCALITYMETRICS_ENABLED, "true");
    factory.init(new NamedList<>(props));
    factory.initializeMetrics(metricManager, registry, scope);
    // get the metrics map for the locality bean
    MetricsMap metrics = (MetricsMap) metricManager.registry(registry).getMetrics().get("OTHER." + scope + ".hdfsLocality");
    // We haven't done anything, so there should be no data
    Map<String, Object> statistics = metrics.getValue();
    assertEquals("Saw bytes that were not written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), 0l, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL));
    assertEquals("Counted bytes as local when none written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO), 0, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO));
    // create a directory and a file
    String path = HdfsTestUtil.getURI(dfsCluster) + "/solr3/";
    Directory dir = factory.create(path, NoLockFactory.INSTANCE, DirContext.DEFAULT);
    try (IndexOutput writer = dir.createOutput("output", null)) {
        writer.writeLong(42L);
    }
    final long long_bytes = Long.SIZE / Byte.SIZE;
    // no locality because hostname not set
    factory.setHost("bogus");
    statistics = metrics.getValue();
    assertEquals("Wrong number of total bytes counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL));
    assertEquals("Wrong number of total blocks counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL), 1, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL));
    assertEquals("Counted block as local when bad hostname set: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL), 0, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL));
    // set hostname and check again
    factory.setHost("127.0.0.1");
    statistics = metrics.getValue();
    assertEquals("Did not count block as local after setting hostname: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL), long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL));
    factory.close();
}
Also used : MetricsMap(org.apache.solr.metrics.MetricsMap) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) IndexOutput(org.apache.lucene.store.IndexOutput) Random(java.util.Random) SolrMetricManager(org.apache.solr.metrics.SolrMetricManager) Directory(org.apache.lucene.store.Directory) Test(org.junit.Test)
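The test assumes a running MiniDFSCluster in the dfsCluster field, created in class setup that is not shown on this page. A rough sketch of that kind of setup as a fragment of the test class; the method names follow the HDFS test utilities used elsewhere in the same code base, but treat the exact signatures and arguments as assumptions.

private static MiniDFSCluster dfsCluster;

@BeforeClass
public static void setupClass() throws Exception {
    // Spins up an in-process HDFS cluster backed by a temporary directory (assumed helper).
    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
}

@AfterClass
public static void teardownClass() throws Exception {
    HdfsTestUtil.teardownClass(dfsCluster);
    dfsCluster = null;
}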

Example 13 with MetricsMap

Use of org.apache.solr.metrics.MetricsMap in project lucene-solr by apache.

Source: class BJQParserTest, method testCacheHit.

@Test
public void testCacheHit() throws IOException {
    MetricsMap parentFilterCache = (MetricsMap) h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegFilter");
    MetricsMap filterCache = (MetricsMap) h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache");
    Map<String, Object> parentsBefore = parentFilterCache.getValue();
    Map<String, Object> filtersBefore = filterCache.getValue();
    // it should be weird enough to be unique
    String parentFilter = "parent_s:([a TO c] [d TO f])";
    assertQ("search by parent filter", req("q", "{!parent which=\"" + parentFilter + "\"}"), "//*[@numFound='6']");
    assertQ("filter by parent filter", req("q", "*:*", "fq", "{!parent which=\"" + parentFilter + "\"}"), "//*[@numFound='6']");
    assertEquals("didn't hit fqCache yet ", 0L, delta("hits", filterCache.getValue(), filtersBefore));
    assertQ("filter by join", req("q", "*:*", "fq", "{!parent which=\"" + parentFilter + "\"}child_s:l"), "//*[@numFound='6']");
    assertEquals("in cache mode every request lookups", 3, delta("lookups", parentFilterCache.getValue(), parentsBefore));
    assertEquals("last two lookups causes hits", 2, delta("hits", parentFilterCache.getValue(), parentsBefore));
    assertEquals("the first lookup gets insert", 1, delta("inserts", parentFilterCache.getValue(), parentsBefore));
    assertEquals("true join query is cached in fqCache", 1L, delta("lookups", filterCache.getValue(), filtersBefore));
}
Also used : MetricsMap(org.apache.solr.metrics.MetricsMap) Test(org.junit.Test)
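The delta helper used above is defined elsewhere in BJQParserTest; it is not part of the MetricsMap API. A plausible sketch of it, assuming the cache statistics in the MetricsMap snapshots are numeric values:

// Difference of a single named statistic between two MetricsMap snapshots.
private long delta(String key, Map<String, Object> current, Map<String, Object> before) {
    return ((Number) current.get(key)).longValue() - ((Number) before.get(key)).longValue();
}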

Example 14 with MetricsMap

Use of org.apache.solr.metrics.MetricsMap in project lucene-solr by apache.

Source: class TestScoreJoinQPScore, method testCacheHit.

public void testCacheHit() throws Exception {
    indexDataForScorring();
    Map<String, Metric> metrics = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName()).getMetrics();
    MetricsMap mm = (MetricsMap) metrics.get("CACHE.searcher.queryResultCache");
    {
        Map<String, Object> statPre = mm.getValue();
        h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true"));
        assertHitOrInsert(mm.getValue(), statPre);
    }
    {
        Map<String, Object> statPre = mm.getValue();
        h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true"));
        assertHit(mm.getValue(), statPre);
    }
    {
        Map<String, Object> statPre = mm.getValue();
        Random r = random();
        boolean changed = false;
        boolean x = false;
        String from = (x = r.nextBoolean()) ? "id" : "movieId_s";
        changed |= x;
        String to = (x = r.nextBoolean()) ? "movieId_s" : "id";
        changed |= x;
        String score = (x = r.nextBoolean()) ? not(ScoreMode.Avg).name() : "Avg";
        changed |= x;
        /* till SOLR-7814:
        String boost = (x = r.nextBoolean()) ? "23" : "1";
        changed |= x;
        */
        String q = (!changed) ? (r.nextBoolean() ? "title:first^67" : "title:night") : "title:first";
        final String resp = h.query(req("q", "{!join from=" + from + " to=" + to + " score=" + score
                // + " b=" + boost   (disabled till SOLR-7814)
                + "}" + q, "fl", "id", "omitHeader", "true"));
        assertInsert(mm.getValue(), statPre);
        statPre = mm.getValue();
        final String repeat = h.query(req("q", "{!join from=" + from + " to=" + to + " score=" + score.toLowerCase(Locale.ROOT)
                // + " b=" + boost   (disabled till SOLR-7814)
                + "}" + q, "fl", "id", "omitHeader", "true"));
        assertHit(mm.getValue(), statPre);
        assertEquals("lowercase shouldn't change anything", resp, repeat);
        final String aMod = score.substring(0, score.length() - 1);
        assertQEx("exception on " + aMod, "ScoreMode", req("q", "{!join from=" + from + " to=" + to + " score=" + aMod + "}" + q, "fl", "id", "omitHeader", "true"), SolrException.ErrorCode.BAD_REQUEST);
    }
    // These queries do not overlap with the others in this test case;
    // however, it might be better to extract this method into a separate suite.
    // For now, clear the cache contents in case of repeated runs.
    SolrCache cache = (SolrCache) h.getCore().getInfoRegistry().get("queryResultCache");
    cache.clear();
}
Also used : MetricsMap(org.apache.solr.metrics.MetricsMap) Random(java.util.Random) SolrCache(org.apache.solr.search.SolrCache) Metric(com.codahale.metrics.Metric) Map(java.util.Map)
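assertHit, assertInsert, and assertHitOrInsert are private helpers of TestScoreJoinQPScore that compare two queryResultCache snapshots; they are not shown on this page. A hedged sketch of what they plausibly check, reusing the delta helper idea from the previous example:

private void assertHit(Map<String, Object> current, Map<String, Object> statPre) {
    // One lookup that was served from the cache: a hit, no insert.
    assertEquals(1L, delta("lookups", current, statPre));
    assertEquals(1L, delta("hits", current, statPre));
    assertEquals(0L, delta("inserts", current, statPre));
}

private void assertInsert(Map<String, Object> current, Map<String, Object> statPre) {
    // One lookup that missed and therefore inserted a new entry.
    assertEquals(1L, delta("lookups", current, statPre));
    assertEquals(0L, delta("hits", current, statPre));
    assertEquals(1L, delta("inserts", current, statPre));
}

private void assertHitOrInsert(Map<String, Object> current, Map<String, Object> statPre) {
    // First query of the test: depending on prior state it either hits or inserts.
    assertEquals(1L, delta("lookups", current, statPre));
    assertEquals(1L, delta("hits", current, statPre) + delta("inserts", current, statPre));
}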

Example 15 with MetricsMap

Use of org.apache.solr.metrics.MetricsMap in project lucene-solr by apache.

Source: class TestSolrFieldCacheBean, method assertEntryListNotIncluded.

private void assertEntryListNotIncluded(boolean checkJmx) {
    SolrFieldCacheBean mbean = new SolrFieldCacheBean();
    Random r = random();
    String registryName = TestUtil.randomSimpleString(r, 1, 10);
    SolrMetricManager metricManager = h.getCoreContainer().getMetricManager();
    mbean.initializeMetrics(metricManager, registryName, null);
    MetricsMap metricsMap = (MetricsMap) metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache");
    Map<String, Object> metrics = checkJmx ? metricsMap.getValue(true) : metricsMap.getValue();
    assertTrue(((Number) metrics.get("entries_count")).longValue() > 0);
    assertNull(metrics.get("total_size"));
    assertNull(metrics.get("entry#0"));
}
Also used : MetricsMap(org.apache.solr.metrics.MetricsMap) Random(java.util.Random) SolrMetricManager(org.apache.solr.metrics.SolrMetricManager)

Aggregations

MetricsMap (org.apache.solr.metrics.MetricsMap) 26 usages
SolrMetricManager (org.apache.solr.metrics.SolrMetricManager) 10 usages
Map (java.util.Map) 8 usages
Test (org.junit.Test) 7 usages
MetricRegistry (com.codahale.metrics.MetricRegistry) 5 usages
MethodHandles (java.lang.invoke.MethodHandles) 5 usages
HashSet (java.util.HashSet) 5 usages
Set (java.util.Set) 5 usages
SolrException (org.apache.solr.common.SolrException) 5 usages
Logger (org.slf4j.Logger) 5 usages
LoggerFactory (org.slf4j.LoggerFactory) 5 usages
HashMap (java.util.HashMap) 4 usages
Random (java.util.Random) 4 usages
List (java.util.List) 3 usages
TimeUnit (java.util.concurrent.TimeUnit) 3 usages
IOException (java.io.IOException) 2 usages
CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList) 2 usages
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams) 2 usages
Gauge (com.codahale.metrics.Gauge) 1 usage
JmxReporter (com.codahale.metrics.JmxReporter) 1 usage
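Across all of the examples above the pattern is the same: look up the MetricsMap gauge by name in a metric registry, then read a snapshot of its statistics via getValue(), or getValue(true) for the detailed view used in the JMX check of Example 15. A minimal, self-contained sketch of that pattern; the registry and metric names are the ones used in the examples, but treat them as illustrative rather than as a fixed API contract.

import java.util.Map;

import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;

public class FilterCacheInsertsReader {

    /** Reads the current "inserts" count of the filterCache gauge, or -1 if the gauge is absent. */
    public static long filterCacheInserts(SolrMetricManager metricManager, String registryName) {
        MetricsMap filterCacheStats = (MetricsMap) metricManager.registry(registryName)
            .getMetrics().get("CACHE.searcher.filterCache");
        if (filterCacheStats == null) {
            return -1L;
        }
        // getValue() returns a Map<String, Object> snapshot of the cache statistics.
        Map<String, Object> snapshot = filterCacheStats.getValue();
        return ((Number) snapshot.get("inserts")).longValue();
    }
}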