
Example 6 with FieldStatsInfo

Use of org.apache.solr.client.solrj.response.FieldStatsInfo in project lucene-solr by apache.

From the class DistributedFacetPivotSmallTest, method testPivotFacetRangeAndQuery.

private void testPivotFacetRangeAndQuery() throws Exception {
    SolrParams params = params("q", "*:*", "rows", "0", "facet", "true", "stats", "true", "facet.pivot", "{!range=s1 query=s2 stats=s3}place_t,company_t", "facet.range", "{!tag=s1 key=price}price_ti", "facet.query", "{!tag=s2 key=highPrice}price_ti:[25 TO 100]", "facet.query", "{!tag=s2 key=lowPrice}price_ti:[0 TO 20]", "stats.field", ("{!tag=s3 key=avg_price}price_ti"), "facet.range.start", "0", "facet.range.end", "100", "facet.range.gap", "20", FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT, FacetParams.FACET_LIMIT, "2");
    UnorderedEqualityArrayList<PivotField> expectedPlacePivots = new UnorderedEqualityArrayList<>();
    UnorderedEqualityArrayList<PivotField> expectedDublinPivots = new UnorderedEqualityArrayList<>();
    expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", 4, null, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 1, 2 }), createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0)));
    expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", 4, null, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 1, 2 }), createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0)));
    UnorderedEqualityArrayList<PivotField> expectedLondonPivots = new UnorderedEqualityArrayList<>();
    expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, null, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 2, 0 }), createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0)));
    expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", 3, null, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 2, 0 }), createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0)));
    expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, expectedDublinPivots, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 1, 2 }), createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0)));
    expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, expectedLondonPivots, createExpectedQCount(new String[] { "highPrice", "lowPrice" }, new int[] { 3, 0 }), createExpectedRange("price", 0, 100, 20, 0, 3, 0, 0, 0)));
    QueryResponse rsp = query(params);
    List<PivotField> placePivots = rsp.getFacetPivot().get("place_t,company_t");
    assertEquals(expectedPlacePivots, placePivots);
    PivotField dublinPivotField = placePivots.get(0);
    assertEquals("dublin", dublinPivotField.getValue());
    assertEquals(4, dublinPivotField.getCount());
    PivotField microsoftPivotField = dublinPivotField.getPivot().get(0);
    assertEquals("microsoft", microsoftPivotField.getValue());
    assertEquals(4, microsoftPivotField.getCount());
    FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price");
    assertEquals(21.0, (double) dublinMicrosoftStatsInfo.getMean(), 0.1E-7);
    assertEquals(15.0, dublinMicrosoftStatsInfo.getMin());
    assertEquals(29.0, dublinMicrosoftStatsInfo.getMax());
    assertEquals(3, (long) dublinMicrosoftStatsInfo.getCount());
    assertEquals(1, (long) dublinMicrosoftStatsInfo.getMissing());
    assertEquals(63.0, dublinMicrosoftStatsInfo.getSum());
    assertEquals(1427.0, dublinMicrosoftStatsInfo.getSumOfSquares(), 0.1E-7);
    assertEquals(7.211102550927978, dublinMicrosoftStatsInfo.getStddev(), 0.1E-7);
}
Also used: FieldStatsInfo(org.apache.solr.client.solrj.response.FieldStatsInfo) QueryResponse(org.apache.solr.client.solrj.response.QueryResponse) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) SolrParams(org.apache.solr.common.params.SolrParams) PivotField(org.apache.solr.client.solrj.response.PivotField)
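
For orientation, here is a minimal SolrJ sketch of issuing the same kind of tagged request outside the test framework; the Solr URL and collection name are placeholders and the field names only mirror the test data. The {!range=...}, {!query=...} and {!stats=...} local params on facet.pivot refer to the tags declared on facet.range, facet.query and stats.field, so each pivot value carries its own range counts, query counts and FieldStatsInfo.

static void pivotWithTaggedStats() throws Exception {
    // hypothetical URL/collection; field names mirror the test data above but are only placeholders
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
        SolrQuery q = new SolrQuery("*:*");
        q.setRows(0);
        q.setFacet(true);
        q.set("stats", "true");
        q.add("facet.pivot", "{!range=r1 query=q1 stats=st1}place_t,company_t");
        q.add("facet.range", "{!tag=r1 key=price}price_ti");
        q.set("facet.range.start", "0");
        q.set("facet.range.end", "100");
        q.set("facet.range.gap", "20");
        q.add("facet.query", "{!tag=q1 key=highPrice}price_ti:[25 TO 100]");
        q.add("stats.field", "{!tag=st1 key=avg_price}price_ti");
        QueryResponse rsp = client.query(q);
        for (PivotField place : rsp.getFacetPivot().get("place_t,company_t")) {
            // each pivot value gets its own stats entry under the key declared on stats.field
            FieldStatsInfo avgPrice = place.getFieldStatsInfo().get("avg_price");
            System.out.println(place.getValue() + " count=" + place.getCount() + " mean=" + avgPrice.getMean());
        }
    }
}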

Example 7 with FieldStatsInfo

Use of org.apache.solr.client.solrj.response.FieldStatsInfo in project lucene-solr by apache.

From the class DistributedFacetPivotSmallTest, method doTestDeepPivotStats.

/**
   * @param justMean - only the mean stat is requested/computed
   */
private void doTestDeepPivotStats(boolean justMean) throws Exception {
    SolrParams params = params("q", "*:*", "rows", "0", "facet", "true", "stats", "true", "facet.pivot", "{!stats=s1}place_t,company_t", "stats.field", ("{!key=avg_price tag=s1 " + (justMean ? "mean=true" : "") + "}price_ti"));
    QueryResponse rsp = query(params);
    List<PivotField> placePivots = rsp.getFacetPivot().get("place_t,company_t");
    PivotField dublinPivotField = placePivots.get(0);
    assertEquals("dublin", dublinPivotField.getValue());
    assertEquals(4, dublinPivotField.getCount());
    PivotField microsoftPivotField = dublinPivotField.getPivot().get(0);
    assertEquals("microsoft", microsoftPivotField.getValue());
    assertEquals(4, microsoftPivotField.getCount());
    FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price");
    assertEquals(21.0, (double) dublinMicrosoftStatsInfo.getMean(), 0.1E-7);
    if (justMean) {
        assertNull(dublinMicrosoftStatsInfo.getMin());
        assertNull(dublinMicrosoftStatsInfo.getMax());
        assertNull(dublinMicrosoftStatsInfo.getCount());
        assertNull(dublinMicrosoftStatsInfo.getMissing());
        assertNull(dublinMicrosoftStatsInfo.getSum());
        assertNull(dublinMicrosoftStatsInfo.getSumOfSquares());
        assertNull(dublinMicrosoftStatsInfo.getStddev());
    } else {
        assertEquals(15.0, dublinMicrosoftStatsInfo.getMin());
        assertEquals(29.0, dublinMicrosoftStatsInfo.getMax());
        assertEquals(3, (long) dublinMicrosoftStatsInfo.getCount());
        assertEquals(1, (long) dublinMicrosoftStatsInfo.getMissing());
        assertEquals(63.0, dublinMicrosoftStatsInfo.getSum());
        assertEquals(1427.0, dublinMicrosoftStatsInfo.getSumOfSquares(), 0.1E-7);
        assertEquals(7.211102550927978, dublinMicrosoftStatsInfo.getStddev(), 0.1E-7);
    }
    PivotField cardiffPivotField = placePivots.get(2);
    assertEquals("cardiff", cardiffPivotField.getValue());
    assertEquals(3, cardiffPivotField.getCount());
    PivotField polecatPivotField = cardiffPivotField.getPivot().get(0);
    assertEquals("polecat", polecatPivotField.getValue());
    assertEquals(3, polecatPivotField.getCount());
    FieldStatsInfo cardiffPolecatStatsInfo = polecatPivotField.getFieldStatsInfo().get("avg_price");
    assertEquals(27.0, (double) cardiffPolecatStatsInfo.getMean(), 0.1E-7);
    if (justMean) {
        assertNull(cardiffPolecatStatsInfo.getMin());
        assertNull(cardiffPolecatStatsInfo.getMax());
        assertNull(cardiffPolecatStatsInfo.getCount());
        assertNull(cardiffPolecatStatsInfo.getMissing());
        assertNull(cardiffPolecatStatsInfo.getSum());
        assertNull(cardiffPolecatStatsInfo.getSumOfSquares());
        assertNull(cardiffPolecatStatsInfo.getStddev());
    } else {
        assertEquals(15.0, cardiffPolecatStatsInfo.getMin());
        assertEquals(39.0, cardiffPolecatStatsInfo.getMax());
        assertEquals(2, (long) cardiffPolecatStatsInfo.getCount());
        assertEquals(1, (long) cardiffPolecatStatsInfo.getMissing());
        assertEquals(54.0, cardiffPolecatStatsInfo.getSum());
        assertEquals(1746.0, cardiffPolecatStatsInfo.getSumOfSquares(), 0.1E-7);
        assertEquals(16.97056274847714, cardiffPolecatStatsInfo.getStddev(), 0.1E-7);
    }
    PivotField krakowPivotField = placePivots.get(3);
    assertEquals("krakow", krakowPivotField.getValue());
    assertEquals(3, krakowPivotField.getCount());
    PivotField fujitsuPivotField = krakowPivotField.getPivot().get(3);
    assertEquals("fujitsu", fujitsuPivotField.getValue());
    assertEquals(1, fujitsuPivotField.getCount());
    FieldStatsInfo krakowFujitsuStatsInfo = fujitsuPivotField.getFieldStatsInfo().get("avg_price");
    assertEquals(Double.NaN, (double) krakowFujitsuStatsInfo.getMean(), 0.1E-7);
    if (justMean) {
        assertNull(krakowFujitsuStatsInfo.getMin());
        assertNull(krakowFujitsuStatsInfo.getMax());
        assertNull(krakowFujitsuStatsInfo.getCount());
        assertNull(krakowFujitsuStatsInfo.getMissing());
        assertNull(krakowFujitsuStatsInfo.getSum());
        assertNull(krakowFujitsuStatsInfo.getSumOfSquares());
        assertNull(krakowFujitsuStatsInfo.getStddev());
    } else {
        assertEquals(null, krakowFujitsuStatsInfo.getMin());
        assertEquals(null, krakowFujitsuStatsInfo.getMax());
        assertEquals(0, (long) krakowFujitsuStatsInfo.getCount());
        assertEquals(1, (long) krakowFujitsuStatsInfo.getMissing());
        assertEquals(0.0, krakowFujitsuStatsInfo.getSum());
        assertEquals(0.0, krakowFujitsuStatsInfo.getSumOfSquares(), 0.1E-7);
        assertEquals(Double.NaN, (double) krakowFujitsuStatsInfo.getMean(), 0.1E-7);
        assertEquals(0.0, krakowFujitsuStatsInfo.getStddev(), 0.1E-7);
    }
}
Also used: FieldStatsInfo(org.apache.solr.client.solrj.response.FieldStatsInfo) QueryResponse(org.apache.solr.client.solrj.response.QueryResponse) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) SolrParams(org.apache.solr.common.params.SolrParams) PivotField(org.apache.solr.client.solrj.response.PivotField)
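
When the stats.field local params request only specific statistics (mean=true above), the remaining FieldStatsInfo accessors come back null, so client code outside a test should null-check before unboxing. A small sketch of such a guard, assuming a numeric stats field; the helper name is hypothetical:

static Double meanOrNull(PivotField pivot, String statsKey) {
    // returns null when stats were not requested for this pivot, or no documents had a value
    Map<String, FieldStatsInfo> statsByKey = pivot.getFieldStatsInfo();
    FieldStatsInfo info = (statsByKey == null) ? null : statsByKey.get(statsKey);
    if (info == null || info.getMean() == null) {
        return null;
    }
    // getMean() is typed as Object (it can be a Date for date fields); a numeric field is assumed here
    return ((Number) info.getMean()).doubleValue();
}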

Example 8 with FieldStatsInfo

Use of org.apache.solr.client.solrj.response.FieldStatsInfo in project lucene-solr by apache.

From the class SolrExampleTests, method testPivotFacetsStats.

@Test
public void testPivotFacetsStats() throws Exception {
    SolrClient client = getSolrClient();
    // Empty the database...
    // delete everything!
    client.deleteByQuery("*:*");
    client.commit();
    // make sure it got in
    assertNumFound("*:*", 0);
    int id = 1;
    ArrayList<SolrInputDocument> docs = new ArrayList<>();
    docs.add(makeTestDoc("id", id++, "features", "aaa", "manu", "apple", "cat", "a", "inStock", true, "popularity", 12, "price", .017));
    docs.add(makeTestDoc("id", id++, "features", "aaa", "manu", "lg", "cat", "a", "inStock", false, "popularity", 13, "price", 16.04));
    docs.add(makeTestDoc("id", id++, "features", "aaa", "manu", "samsung", "cat", "a", "inStock", true, "popularity", 14, "price", 12.34));
    docs.add(makeTestDoc("id", id++, "features", "aaa", "manu", "lg", "cat", "b", "inStock", false, "popularity", 24, "price", 51.39));
    docs.add(makeTestDoc("id", id++, "features", "aaa", "manu", "nokia", "cat", "b", "inStock", true, "popularity", 28, "price", 131.39));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "ztc", "cat", "a", "inStock", false, "popularity", 32));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "htc", "cat", "a", "inStock", true, "popularity", 31, "price", 131.39));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "apple", "cat", "b", "inStock", false, "popularity", 36));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "lg", "cat", "b", "inStock", true, "popularity", 37, "price", 1.39));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "ztc", "cat", "b", "inStock", false, "popularity", 38, "price", 47.98));
    docs.add(makeTestDoc("id", id++, "features", "bbb", "manu", "ztc", "cat", "b", "inStock", true, "popularity", -38));
    // something not matching all fields
    docs.add(makeTestDoc("id", id++, "cat", "b"));
    client.add(docs);
    client.commit();
    for (String pivot : new String[] { "{!key=pivot_key stats=s1}features,manu", "{!key=pivot_key stats=s1}features,manu,cat", "{!key=pivot_key stats=s1}features,manu,cat,inStock" }) {
        // for any of these pivot params, the assertions we check should be the same
        // (we stop asserting at the "manu" level)
        SolrQuery query = new SolrQuery("*:*");
        query.addFacetPivotField(pivot);
        query.setFacetLimit(1);
        query.addGetFieldStatistics("{!key=foo_price tag=s1}price", "{!tag=s1}popularity");
        query.setFacetMinCount(0);
        query.setRows(0);
        QueryResponse rsp = client.query(query);
        // check top (ie: non-pivot) stats
        Map<String, FieldStatsInfo> map = rsp.getFieldStatsInfo();
        FieldStatsInfo intValueStatsInfo = map.get("popularity");
        assertEquals(-38.0d, intValueStatsInfo.getMin());
        assertEquals(38.0d, intValueStatsInfo.getMax());
        assertEquals(11l, intValueStatsInfo.getCount().longValue());
        assertEquals(1l, intValueStatsInfo.getMissing().longValue());
        assertEquals(227.0d, intValueStatsInfo.getSum());
        assertEquals(20.636363636363637d, intValueStatsInfo.getMean());
        FieldStatsInfo doubleValueStatsInfo = map.get("foo_price");
        assertEquals(.017d, (double) doubleValueStatsInfo.getMin(), .01d);
        assertEquals(131.39d, (double) doubleValueStatsInfo.getMax(), .01d);
        assertEquals(8l, doubleValueStatsInfo.getCount().longValue());
        assertEquals(4l, doubleValueStatsInfo.getMissing().longValue());
        assertEquals(391.93d, (double) doubleValueStatsInfo.getSum(), .01d);
        assertEquals(48.99d, (double) doubleValueStatsInfo.getMean(), .01d);
        // now get deeper and look at the pivots...
        NamedList<List<PivotField>> pivots = rsp.getFacetPivot();
        assertTrue(!pivots.get("pivot_key").isEmpty());
        List<PivotField> list = pivots.get("pivot_key");
        PivotField featuresBBBPivot = list.get(0);
        assertEquals("features", featuresBBBPivot.getField());
        assertEquals("bbb", featuresBBBPivot.getValue());
        assertNotNull(featuresBBBPivot.getFieldStatsInfo());
        assertEquals(2, featuresBBBPivot.getFieldStatsInfo().size());
        FieldStatsInfo featuresBBBPivotStats1 = featuresBBBPivot.getFieldStatsInfo().get("foo_price");
        assertEquals("foo_price", featuresBBBPivotStats1.getName());
        assertEquals(131.39d, (double) featuresBBBPivotStats1.getMax(), .01d);
        assertEquals(1.38d, (double) featuresBBBPivotStats1.getMin(), .01d);
        assertEquals(180.75d, (double) featuresBBBPivotStats1.getSum(), .01d);
        assertEquals(3, (long) featuresBBBPivotStats1.getCount());
        assertEquals(3, (long) featuresBBBPivotStats1.getMissing());
        assertEquals(60.25d, (double) featuresBBBPivotStats1.getMean(), .01d);
        assertEquals(65.86d, featuresBBBPivotStats1.getStddev(), .01d);
        assertEquals(19567.34d, featuresBBBPivotStats1.getSumOfSquares(), .01d);
        FieldStatsInfo featuresBBBPivotStats2 = featuresBBBPivot.getFieldStatsInfo().get("popularity");
        assertEquals("popularity", featuresBBBPivotStats2.getName());
        assertEquals(38.0d, (double) featuresBBBPivotStats2.getMax(), .01d);
        assertEquals(-38.0d, (double) featuresBBBPivotStats2.getMin(), .01d);
        assertEquals(136.0d, (double) featuresBBBPivotStats2.getSum(), .01d);
        assertEquals(6, (long) featuresBBBPivotStats2.getCount());
        assertEquals(0, (long) featuresBBBPivotStats2.getMissing());
        assertEquals(22.66d, (double) featuresBBBPivotStats2.getMean(), .01d);
        assertEquals(29.85d, featuresBBBPivotStats2.getStddev(), .01d);
        assertEquals(7538.0d, featuresBBBPivotStats2.getSumOfSquares(), .01d);
        List<PivotField> nestedPivotList = featuresBBBPivot.getPivot();
        PivotField featuresBBBPivotPivot = nestedPivotList.get(0);
        assertEquals("manu", featuresBBBPivotPivot.getField());
        assertEquals("ztc", featuresBBBPivotPivot.getValue());
        assertNotNull(featuresBBBPivotPivot.getFieldStatsInfo());
        assertEquals(2, featuresBBBPivotPivot.getFieldStatsInfo().size());
        FieldStatsInfo featuresBBBManuZtcPivotStats1 = featuresBBBPivotPivot.getFieldStatsInfo().get("foo_price");
        assertEquals("foo_price", featuresBBBManuZtcPivotStats1.getName());
        assertEquals(47.97d, (double) featuresBBBManuZtcPivotStats1.getMax(), .01d);
        assertEquals(47.97d, (double) featuresBBBManuZtcPivotStats1.getMin(), .01d);
        assertEquals(47.97d, (double) featuresBBBManuZtcPivotStats1.getSum(), .01d);
        assertEquals(1, (long) featuresBBBManuZtcPivotStats1.getCount());
        assertEquals(2, (long) featuresBBBManuZtcPivotStats1.getMissing());
        assertEquals(47.97d, (double) featuresBBBManuZtcPivotStats1.getMean(), .01d);
        assertEquals(0.0d, featuresBBBManuZtcPivotStats1.getStddev(), .01d);
        assertEquals(2302.08d, featuresBBBManuZtcPivotStats1.getSumOfSquares(), .01d);
        FieldStatsInfo featuresBBBManuZtcPivotStats2 = featuresBBBPivotPivot.getFieldStatsInfo().get("popularity");
        assertEquals("popularity", featuresBBBManuZtcPivotStats2.getName());
        assertEquals(38.0d, (double) featuresBBBManuZtcPivotStats2.getMax(), .01d);
        assertEquals(-38.0d, (double) featuresBBBManuZtcPivotStats2.getMin(), .01d);
        assertEquals(32.0, (double) featuresBBBManuZtcPivotStats2.getSum(), .01d);
        assertEquals(3, (long) featuresBBBManuZtcPivotStats2.getCount());
        assertEquals(0, (long) featuresBBBManuZtcPivotStats2.getMissing());
        assertEquals(10.66d, (double) featuresBBBManuZtcPivotStats2.getMean(), .01d);
        assertEquals(42.25d, featuresBBBManuZtcPivotStats2.getStddev(), .01d);
        assertEquals(3912.0d, featuresBBBManuZtcPivotStats2.getSumOfSquares(), .01d);
    }
}
Also used: ArrayList(java.util.ArrayList) StringContains.containsString(org.junit.internal.matchers.StringContains.containsString) FieldStatsInfo(org.apache.solr.client.solrj.response.FieldStatsInfo) SolrInputDocument(org.apache.solr.common.SolrInputDocument) ErrorTrackingConcurrentUpdateSolrClient(org.apache.solr.client.solrj.embedded.SolrExampleStreamingTest.ErrorTrackingConcurrentUpdateSolrClient) HttpSolrClient(org.apache.solr.client.solrj.impl.HttpSolrClient) QueryResponse(org.apache.solr.client.solrj.response.QueryResponse) PivotField(org.apache.solr.client.solrj.response.PivotField) SolrDocumentList(org.apache.solr.common.SolrDocumentList) List(java.util.List) ArrayList(java.util.ArrayList) NamedList(org.apache.solr.common.util.NamedList) Test(org.junit.Test)
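
The nested structure asserted above (pivot_key -> features -> manu, with a stats map at every level) can also be walked generically. A minimal sketch, reusing only the SolrJ types already shown; the helper name and the cosmetic indent parameter are inventions for illustration:

static void printPivotStats(List<PivotField> pivots, String indent) {
    if (pivots == null) {
        return;
    }
    for (PivotField p : pivots) {
        System.out.println(indent + p.getField() + "=" + p.getValue() + " (" + p.getCount() + ")");
        if (p.getFieldStatsInfo() != null) {
            for (Map.Entry<String, FieldStatsInfo> e : p.getFieldStatsInfo().entrySet()) {
                // one FieldStatsInfo per stats.field key, scoped to this pivot value
                System.out.println(indent + "  stats " + e.getKey() + ": mean=" + e.getValue().getMean());
            }
        }
        // recurse into the next pivot level, if any
        printPivotStats(p.getPivot(), indent + "  ");
    }
}

Called as printPivotStats(rsp.getFacetPivot().get("pivot_key"), "") against the response above, it prints every features/manu/cat/inStock branch along with its foo_price and popularity stats.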

Example 9 with FieldStatsInfo

Use of org.apache.solr.client.solrj.response.FieldStatsInfo in project lucene-solr by apache.

From the class SolrExampleTests, method testStatistics.

@Test
public void testStatistics() throws Exception {
    SolrClient client = getSolrClient();
    // Empty the database...
    // delete everything!
    client.deleteByQuery("*:*");
    client.commit();
    // make sure it got in
    assertNumFound("*:*", 0);
    String f = "val_i";
    // 0   1   2   3   4   5   6   7   8   9 
    int i = 0;
    int[] nums = new int[] { 23, 26, 38, 46, 55, 63, 77, 84, 92, 94 };
    for (int num : nums) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.setField("id", "doc" + i++);
        doc.setField("name", "doc: " + num);
        doc.setField(f, num);
        client.add(doc);
    }
    client.commit();
    // make sure they all got in
    assertNumFound("*:*", nums.length);
    SolrQuery query = new SolrQuery("*:*");
    query.setRows(0);
    query.setGetFieldStatistics(f);
    QueryResponse rsp = client.query(query);
    FieldStatsInfo stats = rsp.getFieldStatsInfo().get(f);
    assertNotNull(stats);
    assertEquals(23.0, ((Double) stats.getMin()).doubleValue(), 0);
    assertEquals(94.0, ((Double) stats.getMax()).doubleValue(), 0);
    assertEquals(new Long(nums.length), stats.getCount());
    assertEquals(new Long(0), stats.getMissing());
    assertEquals("26.4", stats.getStddev().toString().substring(0, 4));
    // now let's try again with a new set... (odd median)
    //----------------------------------------------------
    // delete everything!
    client.deleteByQuery("*:*");
    client.commit();
    // make sure it got in
    assertNumFound("*:*", 0);
    nums = new int[] { 5, 7, 10, 19, 20 };
    for (int num : nums) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.setField("id", "doc" + i++);
        doc.setField("name", "doc: " + num);
        doc.setField(f, num);
        client.add(doc);
    }
    client.commit();
    // make sure they all got in
    assertNumFound("*:*", nums.length);
    rsp = client.query(query);
    stats = rsp.getFieldStatsInfo().get(f);
    assertNotNull(stats);
    assertEquals(5.0, ((Double) stats.getMin()).doubleValue(), 0);
    assertEquals(20.0, ((Double) stats.getMax()).doubleValue(), 0);
    assertEquals(new Long(nums.length), stats.getCount());
    assertEquals(new Long(0), stats.getMissing());
    // Now try again with faceting
    //---------------------------------
    // delete everything!
    client.deleteByQuery("*:*");
    client.commit();
    // make sure it got in
    assertNumFound("*:*", 0);
    nums = new int[] { 1, 2, 3, 4, 5, 10, 11, 12, 13, 14 };
    for (i = 0; i < nums.length; i++) {
        int num = nums[i];
        SolrInputDocument doc = new SolrInputDocument();
        doc.setField("id", "doc" + i);
        doc.setField("name", "doc: " + num);
        doc.setField(f, num);
        doc.setField("inStock", i < 5);
        client.add(doc);
    }
    client.commit();
    // make sure they all got in
    assertNumFound("inStock:true", 5);
    // make sure they all got in
    assertNumFound("inStock:false", 5);
    // facet on 'inStock'
    query.addStatsFieldFacets(f, "inStock");
    rsp = client.query(query);
    stats = rsp.getFieldStatsInfo().get(f);
    assertNotNull(stats);
    List<FieldStatsInfo> facets = stats.getFacets().get("inStock");
    assertNotNull(facets);
    assertEquals(2, facets.size());
    FieldStatsInfo inStockF = facets.get(0);
    FieldStatsInfo inStockT = facets.get(1);
    if ("true".equals(inStockF.getName())) {
        FieldStatsInfo tmp = inStockF;
        inStockF = inStockT;
        inStockT = tmp;
    }
    // make sure half went to each
    assertEquals(inStockF.getCount(), inStockT.getCount());
    assertEquals(stats.getCount().longValue(), inStockF.getCount() + inStockT.getCount());
    assertTrue("check that min max faceted ok", ((Double) inStockF.getMin()).doubleValue() < ((Double) inStockF.getMax()).doubleValue());
    assertEquals("they have the same distribution", inStockF.getStddev(), inStockT.getStddev());
}
Also used: FieldStatsInfo(org.apache.solr.client.solrj.response.FieldStatsInfo) SolrInputDocument(org.apache.solr.common.SolrInputDocument) ErrorTrackingConcurrentUpdateSolrClient(org.apache.solr.client.solrj.embedded.SolrExampleStreamingTest.ErrorTrackingConcurrentUpdateSolrClient) HttpSolrClient(org.apache.solr.client.solrj.impl.HttpSolrClient) QueryResponse(org.apache.solr.client.solrj.response.QueryResponse) StringContains.containsString(org.junit.internal.matchers.StringContains.containsString) Test(org.junit.Test)
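
The last part of this test uses stats facets: addStatsFieldFacets splits the field statistics per facet value, exposed through FieldStatsInfo.getFacets(). A minimal sketch of that read path, assuming the same val_i / inStock fields as the test data and an already-configured SolrClient; the method name is hypothetical:

static void printStatsFacets(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.setRows(0);
    q.setGetFieldStatistics("val_i");
    // compute the val_i statistics once per distinct value of inStock
    q.addStatsFieldFacets("val_i", "inStock");
    QueryResponse rsp = client.query(q);
    FieldStatsInfo stats = rsp.getFieldStatsInfo().get("val_i");
    for (FieldStatsInfo perValue : stats.getFacets().get("inStock")) {
        // for facet entries, getName() holds the facet value ("true"/"false"), not the field name
        System.out.println("inStock=" + perValue.getName() + " count=" + perValue.getCount() + " mean=" + perValue.getMean());
    }
}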

Example 10 with FieldStatsInfo

Use of org.apache.solr.client.solrj.response.FieldStatsInfo in project lucene-solr by apache.

From the class TestDistributedSearch, method test.

@Test
public void test() throws Exception {
    QueryResponse rsp = null;
    // make a copy so we can restore
    int backupStress = stress;
    del("*:*");
    indexr(id, 1, i1, 100, tlong, 100, t1, "now is the time for all good men", "foo_sev_enum", "Medium", tdate_a, "2010-04-20T11:00:00Z", tdate_b, "2009-08-20T11:00:00Z", "foo_f", 1.414f, "foo_b", "true", "foo_d", 1.414d, s1, "z${foo}");
    indexr(id, 2, i1, 50, tlong, 50, t1, "to come to the aid of their country.", "foo_sev_enum", "Medium", "foo_sev_enum", "High", tdate_a, "2010-05-02T11:00:00Z", tdate_b, "2009-11-02T11:00:00Z", s1, "z${foo}");
    indexr(id, 3, i1, 2, tlong, 2, t1, "how now brown cow", tdate_a, "2010-05-03T11:00:00Z", s1, "z${foo}");
    indexr(id, 4, i1, -100, tlong, 101, t1, "the quick fox jumped over the lazy dog", tdate_a, "2010-05-03T11:00:00Z", tdate_b, "2010-05-03T11:00:00Z", s1, "a");
    indexr(id, 5, i1, 500, tlong, 500, t1, "the quick fox jumped way over the lazy dog", tdate_a, "2010-05-05T11:00:00Z", s1, "b");
    indexr(id, 6, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall", s1, "c");
    indexr(id, 7, i1, 123, tlong, 123, t1, "humpty dumpy had a great fall", s1, "d");
    indexr(id, 8, i1, 876, tlong, 876, tdate_b, "2010-01-05T11:00:00Z", "foo_sev_enum", "High", t1, "all the kings horses and all the kings men", s1, "e");
    indexr(id, 9, i1, 7, tlong, 7, t1, "couldn't put humpty together again", s1, "f");
    // try to ensure there's more than one segment
    commit();
    indexr(id, 10, i1, 4321, tlong, 4321, t1, "this too shall pass", s1, "g");
    indexr(id, 11, i1, -987, tlong, 987, "foo_sev_enum", "Medium", t1, "An eye for eye only ends up making the whole world blind.", s1, "h");
    indexr(id, 12, i1, 379, tlong, 379, t1, "Great works are performed, not by strength, but by perseverance.", s1, "i");
    indexr(id, 13, i1, 232, tlong, 232, t1, "no eggs on wall, lesson learned", oddField, "odd man out", s1, "j");
    // for spellcheck
    indexr(id, "1001", "lowerfilt", "toyota", s1, "k");
    indexr(id, 14, "SubjectTerms_mfacet", new String[] { "mathematical models", "mathematical analysis" }, s1, "l");
    indexr(id, 15, "SubjectTerms_mfacet", new String[] { "test 1", "test 2", "test3" });
    indexr(id, 16, "SubjectTerms_mfacet", new String[] { "test 1", "test 2", "test3" });
    String[] vals = new String[100];
    for (int i = 0; i < 100; i++) {
        vals[i] = "test " + i;
    }
    indexr(id, 17, "SubjectTerms_mfacet", vals);
    for (int i = 100; i < 150; i++) {
        indexr(id, i);
    }
    commit();
    handle.clear();
    handle.put("timestamp", SKIPVAL);
    // not a cloud test, but may use updateLog
    handle.put("_version_", SKIPVAL);
    //Test common query parameters.
    validateCommonQueryParameters();
    // random value sort
    for (String f : fieldNames) {
        query("q", "*:*", "sort", f + " desc");
        query("q", "*:*", "sort", f + " asc");
    }
    // these queries should be exactly ordered and scores should exactly match
    query("q", "*:*", "sort", i1 + " desc");
    query("q", "*:*", "sort", "{!func}testfunc(add(" + i1 + ",5))" + " desc");
    query("q", i1 + "[* TO *]", "sort", i1 + " asc");
    query("q", "*:*", "sort", i1 + " asc, id desc");
    query("q", "*:*", "sort", i1 + " desc", "fl", "*,score");
    query("q", "*:*", "sort", "n_tl1 asc", "fl", "*,score");
    query("q", "*:*", "sort", "n_tl1 desc");
    handle.put("maxScore", SKIPVAL);
    // JavaBinCodec.writeSolrDocumentList() does not expect maxScore and is agnostic of request
    // params, so if maxScore comes back, ignore it.
    query("q", "{!func}" + i1);
    handle.remove("maxScore");
    // even scores should match exactly here
    query("q", "{!func}" + i1, "fl", "*,score");
    handle.put("highlighting", UNORDERED);
    handle.put("response", UNORDERED);
    handle.put("maxScore", SKIPVAL);
    query("q", "quick");
    query("q", "all", "fl", "id", "start", "0");
    // no fields in returned docs
    query("q", "all", "fl", "foofoofoo", "start", "0");
    query("q", "all", "fl", "id", "start", "100");
    handle.put("score", SKIPVAL);
    query("q", "quick", "fl", "*,score");
    query("q", "all", "fl", "*,score", "start", "1");
    query("q", "all", "fl", "*,score", "start", "100");
    query("q", "now their fox sat had put", "fl", "*,score", "hl", "true", "hl.fl", t1);
    query("q", "now their fox sat had put", "fl", "foofoofoo", "hl", "true", "hl.fl", t1);
    query("q", "matchesnothing", "fl", "*,score");
    // test that a single NOW value is propagated to all shards... if that is true
    // then the primary sort should always be a tie and then the secondary should always decide
    query("q", "{!func}ms(NOW)", "sort", "score desc," + i1 + " desc", "fl", "id");
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.field", t1);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.limit", 1);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "quick", "facet.query", "quick", "facet.query", "all", "facet.query", "*:*");
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.mincount", 2);
    // a facet query to test out chars out of the ascii range
    query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "{!term f=foo_s}internationalÿǿ∢㌳");
    // simple field facet on date fields
    // TODO: facet.limit shouldn't be needed: SOLR-6386
    rsp = query("q", "*:*", "rows", 0, "facet", "true", "facet.limit", 1, "facet.field", tdate_a);
    assertEquals(1, rsp.getFacetFields().size());
    // TODO: facet.limit shouldn't be needed: SOLR-6386
    rsp = query("q", "*:*", "rows", 0, "facet", "true", "facet.limit", 1, "facet.field", tdate_b, "facet.field", tdate_a);
    assertEquals(2, rsp.getFacetFields().size());
    String facetQuery = "id:[1 TO 15]";
    // simple range facet on one field
    query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.method", FacetRangeMethod.FILTER);
    // simple range facet on one field using dv method
    query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.method", FacetRangeMethod.DV);
    // range facet on multiple fields
    query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", i1, "f." + i1 + ".facet.range.start", 300, "f." + i1 + ".facet.range.gap", 87, "facet.range.end", 900, "facet.range.start", 200, "facet.range.gap", 100, "f." + tlong + ".facet.range.end", 900, "f." + i1 + ".facet.range.method", FacetRangeMethod.FILTER, "f." + tlong + ".facet.range.method", FacetRangeMethod.DV);
    // range facet with "other" param
    QueryResponse response = query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.other", "all");
    assertEquals(tlong, response.getFacetRanges().get(0).getName());
    assertEquals(new Integer(6), response.getFacetRanges().get(0).getBefore());
    assertEquals(new Integer(5), response.getFacetRanges().get(0).getBetween());
    assertEquals(new Integer(2), response.getFacetRanges().get(0).getAfter());
    // Test mincounts. Do NOT want to go through all the validateControlData machinery in the query() method.
    // Purposely packing a _bunch_ of stuff together here to ensure that the proper level of mincount is used
    // for each.
    ModifiableSolrParams minParams = new ModifiableSolrParams();
    minParams.set("q", "*:*");
    minParams.set("rows", 1);
    minParams.set("facet", "true");
    minParams.set("facet.missing", "true");
    minParams.set("facet.field", i1);
    minParams.set("facet.missing", "true");
    minParams.set("facet.mincount", 2);
    // Return a separate section of ranges over i1. Should respect global range mincount
    minParams.set("facet.range", i1);
    minParams.set("f." + i1 + ".facet.range.start", 0);
    minParams.set("f." + i1 + ".facet.range.gap", 200);
    minParams.set("f." + i1 + ".facet.range.end", 1200);
    minParams.set("f." + i1 + ".facet.mincount", 4);
    // Return a separate section of ranges over tlong. Should respect facet.mincount
    minParams.add("facet.range", tlong);
    minParams.set("f." + tlong + ".facet.range.start", 0);
    minParams.set("f." + tlong + ".facet.range.gap", 100);
    minParams.set("f." + tlong + ".facet.range.end", 1200);
    // Repeat with a range type of date
    minParams.add("facet.range", tdate_b);
    minParams.set("f." + tdate_b + ".facet.range.start", "2009-02-01T00:00:00Z");
    minParams.set("f." + tdate_b + ".facet.range.gap", "+1YEAR");
    minParams.set("f." + tdate_b + ".facet.range.end", "2011-01-01T00:00:00Z");
    minParams.set("f." + tdate_b + ".facet.mincount", 3);
    // Ensure that global mincount is respected for facet queries
    // Should return some counts
    minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]");
    //minParams.set("facet.query", tdate_a + ":[* TO *]"); // Should be removed
    // Should be removed from response
    minParams.add("facet.query", tdate_b + ":[2008-01-01T00:00:00Z TO 2009-09-01T00:00:00Z]");
    setDistributedParams(minParams);
    QueryResponse minResp = queryServer(minParams);
    ModifiableSolrParams eParams = new ModifiableSolrParams();
    eParams.set("q", tdate_b + ":[* TO *]");
    eParams.set("rows", 1000);
    eParams.set("fl", tdate_b);
    setDistributedParams(eParams);
    QueryResponse eResp = queryServer(eParams);
    // Check that exactly the right numbers of counts came through
    assertEquals("Should be exactly 2 range facets returned after minCounts taken into account ", 3, minResp.getFacetRanges().size());
    assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size());
    // Should just be the null entries for field
    checkMinCountsField(minResp.getFacetField(i1).getValues(), new Object[] { null, 55L });
    // range on i1
    checkMinCountsRange(minResp.getFacetRanges().get(0).getCounts(), new Object[] { "0", 5L });
    // range on tlong
    checkMinCountsRange(minResp.getFacetRanges().get(1).getCounts(), new Object[] { "0", 3L, "100", 3L });
    // date (range) on tdate_b
    checkMinCountsRange(minResp.getFacetRanges().get(2).getCounts(), new Object[] { "2009-02-01T00:00:00Z", 3L });
    assertTrue("Should have a facet for tdate_a", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"));
    int qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]");
    assertEquals("tdate_a should be 5", qCount, 5);
    // Now let's do some queries, the above is getting too complex
    minParams = new ModifiableSolrParams();
    minParams.set("q", "*:*");
    minParams.set("rows", 1);
    minParams.set("facet", "true");
    minParams.set("facet.mincount", 3);
    minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]");
    // Should be removed
    minParams.add("facet.query", tdate_b + ":[2009-01-01T00:00:00Z TO 2010-01-01T00:00:00Z]");
    setDistributedParams(minParams);
    minResp = queryServer(minParams);
    assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size());
    assertTrue("Should be an entry for a_n_tdt", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]"));
    qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]");
    assertEquals("a_n_tdt should have a count of 4 ", qCount, 4);
    //  variations of fl
    query("q", "*:*", "fl", "score", "sort", i1 + " desc");
    query("q", "*:*", "fl", i1 + ",score", "sort", i1 + " desc");
    query("q", "*:*", "fl", i1, "fl", "score", "sort", i1 + " desc");
    query("q", "*:*", "fl", "id," + i1, "sort", i1 + " desc");
    query("q", "*:*", "fl", "id", "fl", i1, "sort", i1 + " desc");
    query("q", "*:*", "fl", i1, "fl", "id", "sort", i1 + " desc");
    query("q", "*:*", "fl", "id", "fl", nint, "fl", tint, "sort", i1 + " desc");
    query("q", "*:*", "fl", nint, "fl", "id", "fl", tint, "sort", i1 + " desc");
    handle.put("did", SKIPVAL);
    query("q", "*:*", "fl", "did:[docid]", "sort", i1 + " desc");
    handle.remove("did");
    query("q", "*:*", "fl", "log(" + tlong + "),abs(" + tlong + "),score", "sort", i1 + " desc");
    query("q", "*:*", "fl", "n_*", "sort", i1 + " desc");
    // basic spellcheck testing
    query("q", "toyata", "fl", "id,lowerfilt", "spellcheck", true, "spellcheck.q", "toyata", "qt", "spellCheckCompRH_Direct", "shards.qt", "spellCheckCompRH_Direct");
    // turn off stress... we want to test max combos in min time
    stress = 0;
    for (int i = 0; i < 25 * RANDOM_MULTIPLIER; i++) {
        String f = fieldNames[random().nextInt(fieldNames.length)];
        // the text field is a really interesting one to facet on (and it's multi-valued too)
        if (random().nextBoolean())
            f = t1;
        // we want a random query and not just *:* so we'll get zero counts in facets also
        // TODO: do a better random query
        String q = random().nextBoolean() ? "*:*" : "id:(1 3 5 7 9 11 13) OR id:[100 TO " + random().nextInt(50) + "]";
        // these should be equivalent
        int nolimit = random().nextBoolean() ? -1 : 10000;
        // if limit==-1, we should always get exact matches
        query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.limit", nolimit, "facet.sort", "count", "facet.mincount", random().nextInt(5), "facet.offset", random().nextInt(10));
        query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.limit", nolimit, "facet.sort", "index", "facet.mincount", random().nextInt(5), "facet.offset", random().nextInt(10));
        // for index sort, we should get exact results for mincount <= 1
        query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.sort", "index", "facet.mincount", random().nextInt(2), "facet.offset", random().nextInt(10), "facet.limit", random().nextInt(11) - 1);
    }
    // restore stress
    stress = backupStress;
    // test faceting multiple things at once
    query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "quick", "facet.query", "all", "facet.query", "*:*", "facet.field", t1);
    // test filter tagging, facet exclusion, and naming (multi-select facet support)
    queryAndCompareUIF("q", "*:*", "rows", 0, "facet", "true", "facet.query", "{!key=myquick}quick", "facet.query", "{!key=myall ex=a}all", "facet.query", "*:*", "facet.field", "{!key=mykey ex=a}" + t1, "facet.field", "{!key=other ex=b}" + t1, "facet.field", "{!key=again ex=a,b}" + t1, "facet.field", t1, "fq", "{!tag=a}id:[1 TO 7]", "fq", "{!tag=b}id:[3 TO 9]");
    queryAndCompareUIF("q", "*:*", "facet", "true", "facet.field", "{!ex=t1}SubjectTerms_mfacet", "fq", "{!tag=t1}SubjectTerms_mfacet:(test 1)", "facet.limit", "10", "facet.mincount", "1");
    // test field that is valid in schema but missing in all shards
    query("q", "*:*", "rows", 100, "facet", "true", "facet.field", missingField, "facet.mincount", 2);
    // test field that is valid in schema and missing in some shards
    query("q", "*:*", "rows", 100, "facet", "true", "facet.field", oddField, "facet.mincount", 2);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "stats_dt");
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", i1);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", nint);
    handle.put("stddev", FUZZY);
    handle.put("sumOfSquares", FUZZY);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_a);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_b);
    handle.remove("stddev");
    handle.remove("sumOfSquares");
    rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!cardinality='true'}" + oddField, "stats.field", "{!cardinality='true'}" + tlong);
    {
        // don't leak variables
        // long
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(tlong);
        assertNotNull("missing stats", s);
        assertEquals("wrong cardinality", new Long(13), s.getCardinality());
        //
        assertNull("expected null for min", s.getMin());
        assertNull("expected null for mean", s.getMean());
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for stddev", s.getStddev());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getSum());
        // string
        s = rsp.getFieldStatsInfo().get(oddField);
        assertNotNull("missing stats", s);
        assertEquals("wrong cardinality", new Long(1), s.getCardinality());
        //
        assertNull("expected null for min", s.getMin());
        assertNull("expected null for mean", s.getMean());
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for stddev", s.getStddev());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getSum());
    }
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,2,3,4,5'}" + i1);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,20,30,40,98,99,99.9'}" + i1);
    rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1.0,99.999,0.001'}" + tlong);
    {
        // don't leak variables
        Double[] expectedKeys = new Double[] { 1.0D, 99.999D, 0.001D };
        Double[] expectedVals = new Double[] { 2.0D, 4320.0D, 2.0D };
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(tlong);
        assertNotNull("no stats for " + tlong, s);
        Map<Double, Double> p = s.getPercentiles();
        assertNotNull("no percentils", p);
        assertEquals("insufficient percentiles", expectedKeys.length, p.size());
        Iterator<Double> actualKeys = p.keySet().iterator();
        for (int i = 0; i < expectedKeys.length; i++) {
            Double expectedKey = expectedKeys[i];
            assertTrue("Ran out of actual keys as of : " + i + "->" + expectedKey, actualKeys.hasNext());
            assertEquals(expectedKey, actualKeys.next());
            assertEquals("percentiles are off: " + p.toString(), expectedVals[i], p.get(expectedKey), 1.0D);
        }
        //
        assertNull("expected null for count", s.getMin());
        assertNull("expected null for count", s.getMean());
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for stddev", s.getStddev());
        assertNull("expected null for sum", s.getSum());
    }
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,20,50,80,99'}" + tdate_a);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "fq", "{!tag=nothing}-*:*", "stats.field", "{!key=special_key ex=nothing}stats_dt");
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "f.stats_dt.stats.calcdistinct", "true", "stats.field", "{!key=special_key}stats_dt");
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "f.stats_dt.stats.calcdistinct", "true", "fq", "{!tag=xxx}id:[3 TO 9]", "stats.field", "{!key=special_key}stats_dt", "stats.field", "{!ex=xxx}stats_dt");
    handle.put("stddev", FUZZY);
    handle.put("sumOfSquares", FUZZY);
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", // when comparing with control collection
    "stats.field", "{!lucene key=q_key}" + i1 + "foo_b:true", "stats.field", "{!func key=f_key}sum(" + tlong + "," + i1 + ")");
    query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "stats_dt", "stats.field", i1, "stats.field", tdate_a, "stats.field", tdate_b);
    // only ask for "min" and "mean", explicitly exclude deps of mean, whitebox check shard responses
    try {
        RequestTrackingQueue trackingQueue = new RequestTrackingQueue();
        TrackingShardHandlerFactory.setTrackingQueue(jettys, trackingQueue);
        rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true sum=false mean=true count=false}" + i1);
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1);
        assertNotNull("no stats for " + i1, s);
        //
        assertEquals("wrong min", -987.0D, (Double) s.getMin(), 0.0001D);
        assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D);
        //
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for stddev", s.getStddev());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getPercentiles());
        assertNull("expected null for cardinality", s.getCardinality());
        // sanity check deps relationship
        for (Stat dep : EnumSet.of(Stat.sum, Stat.count)) {
            assertTrue("Purpose of this test is to ensure that asking for some stats works even when the deps " + "of those stats are explicitly excluded -- but the expected dep relationshp is no longer valid. " + "ie: who changed the code and didn't change this test?, expected: " + dep, Stat.mean.getDistribDeps().contains(dep));
        }
        // check our shard requests & responses - ensure we didn't get unnecessary stats from every shard
        int numStatsShardRequests = 0;
        EnumSet<Stat> shardStatsExpected = EnumSet.of(Stat.min, Stat.sum, Stat.count);
        for (List<ShardRequestAndParams> shard : trackingQueue.getAllRequests().values()) {
            for (ShardRequestAndParams shardReq : shard) {
                if (shardReq.params.getBool(StatsParams.STATS, false)) {
                    numStatsShardRequests++;
                    for (ShardResponse shardRsp : shardReq.sreq.responses) {
                        NamedList<Object> shardStats = ((NamedList<NamedList<NamedList<Object>>>) shardRsp.getSolrResponse().getResponse().get("stats")).get("stats_fields").get(i1);
                        assertNotNull("no stard stats for " + i1, shardStats);
                        //
                        for (Map.Entry<String, Object> entry : shardStats) {
                            Stat found = Stat.forName(entry.getKey());
                            assertNotNull("found shardRsp stat key we were not expecting: " + entry, found);
                            assertTrue("found stat we were not expecting: " + entry, shardStatsExpected.contains(found));
                        }
                    }
                }
            }
        }
        assertTrue("did't see any stats=true shard requests", 0 < numStatsShardRequests);
    } finally {
        TrackingShardHandlerFactory.setTrackingQueue(jettys, null);
    }
    // only ask for "min", "mean" and "stddev",
    rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true mean=true stddev=true}" + i1);
    {
        // don't leak variables 
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1);
        assertNotNull("no stats for " + i1, s);
        //
        assertEquals("wrong min", -987.0D, (Double) s.getMin(), 0.0001D);
        assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D);
        assertEquals("wrong stddev", 1271.76215D, (Double) s.getStddev(), 0.0001D);
        //
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getPercentiles());
        assertNull("expected null for cardinality", s.getCardinality());
    }
    // request stats, but disable them all via param refs
    rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "doMin", "false", "stats.field", "{!min=$doMin}" + i1);
    {
        // don't leak variables 
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1);
        // stats section should exist, even though stats should be null
        assertNotNull("no stats for " + i1, s);
        //
        assertNull("expected null for min", s.getMin());
        assertNull("expected null for mean", s.getMean());
        assertNull("expected null for stddev", s.getStddev());
        //
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getPercentiles());
        assertNull("expected null for cardinality", s.getCardinality());
    }
    final String[] stats = new String[] { "min", "max", "sum", "sumOfSquares", "stddev", "mean", "missing", "count" };
    // ask for arbitrary pairs of stats
    for (String stat1 : stats) {
        for (String stat2 : stats) {
            // NOTE: stat1 might equal stat2 - good edge case to test for
            rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!" + stat1 + "=true " + stat2 + "=true}" + i1);
            final List<String> statsExpected = new ArrayList<String>(2);
            statsExpected.add(stat1);
            if (!stat1.equals(stat2)) {
                statsExpected.add(stat2);
            }
            // ignore the FieldStatsInfo convenience class, and look directly at the NamedList
            // so we don't need any sort of crazy reflection
            NamedList<Object> svals = ((NamedList<NamedList<NamedList<Object>>>) rsp.getResponse().get("stats")).get("stats_fields").get(i1);
            assertNotNull("no stats for field " + i1, svals);
            assertEquals("wrong quantity of stats", statsExpected.size(), svals.size());
            for (String s : statsExpected) {
                assertNotNull("stat shouldn't be null: " + s, svals.get(s));
                assertTrue("stat should be a Number: " + s + " -> " + svals.get(s).getClass(), svals.get(s) instanceof Number);
                // some loose assertions since we're iterating over various stats
                if (svals.get(s) instanceof Double) {
                    Double val = (Double) svals.get(s);
                    assertFalse("stat shouldn't be NaN: " + s, val.isNaN());
                    assertFalse("stat shouldn't be Inf: " + s, val.isInfinite());
                    assertFalse("stat shouldn't be 0: " + s, val.equals(0.0D));
                } else {
                    // count or missing
                    assertTrue("stat should be count of missing: " + s, ("count".equals(s) || "missing".equals(s)));
                    assertTrue("stat should be a Long: " + s + " -> " + svals.get(s).getClass(), svals.get(s) instanceof Long);
                    Long val = (Long) svals.get(s);
                    assertFalse("stat shouldn't be 0: " + s, val.equals(0L));
                }
            }
        }
    }
    // all of these diff ways of asking for min & calcdistinct should have the same result
    for (SolrParams p : new SolrParams[] { params("stats.field", "{!min=true calcdistinct=true}" + i1), params("stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "false", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "false", "f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true calcdistinct=true}" + i1), params("stats.calcdistinct", "false", "f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), params("stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), params("yes", "true", "stats.field", "{!min=$yes countDistinct=$yes distinctValues=$yes}" + i1) }) {
        rsp = query(SolrParams.wrapDefaults(p, params("q", "*:*", "sort", i1 + " desc", "stats", "true")));
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1);
        assertNotNull(p + " no stats for " + i1, s);
        //
        assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D);
        assertEquals(p + " wrong calcDistinct", new Long(13), s.getCountDistinct());
        assertNotNull(p + " expected non-null list for distinct vals", s.getDistinctValues());
        assertEquals(p + " expected list for distinct vals", 13, s.getDistinctValues().size());
        //
        assertNull(p + " expected null for mean", s.getMean());
        assertNull(p + " expected null for count", s.getCount());
        assertNull(p + " expected null for max", s.getMax());
        assertNull(p + " expected null for missing", s.getMissing());
        assertNull(p + " expected null for stddev", s.getStddev());
        assertNull(p + " expected null for sum", s.getSum());
        assertNull(p + " expected null for percentiles", s.getPercentiles());
        assertNull(p + " expected null for cardinality", s.getCardinality());
    }
    // all of these diff ways of excluding calcdistinct should have the same result
    for (SolrParams p : new SolrParams[] { params("stats.field", "{!min=true calcdistinct=false}" + i1), params("stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true calcdistinct=false}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true countDistinct=false distinctValues=false}" + i1) }) {
        rsp = query(SolrParams.wrapDefaults(p, params("q", "*:*", "sort", i1 + " desc", "stats", "true")));
        FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1);
        assertNotNull(p + " no stats for " + i1, s);
        //
        assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D);
        //
        assertNull(p + " expected null for calcDistinct", s.getCountDistinct());
        assertNull(p + " expected null for distinct vals", s.getDistinctValues());
        //
        assertNull(p + " expected null for mean", s.getMean());
        assertNull(p + " expected null for count", s.getCount());
        assertNull(p + " expected null for max", s.getMax());
        assertNull(p + " expected null for missing", s.getMissing());
        assertNull(p + " expected null for stddev", s.getStddev());
        assertNull(p + " expected null for sum", s.getSum());
        assertNull(p + " expected null for percentiles", s.getPercentiles());
        assertNull(p + " expected null for cardinality", s.getCardinality());
    }
    // this field doesn't exist in any doc in the result set.
    // ensure we get expected values for the stats we ask for, but null for the stats we did not request
    rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true mean=true stddev=true}does_not_exist_i");
    {
        // don't leak variables 
        FieldStatsInfo s = rsp.getFieldStatsInfo().get("does_not_exist_i");
        assertNotNull("no stats for bogus field", s);
        // things we explicitly expect because we asked for them
        // NOTE: min is expected to be null even though requested because of no values
        assertEquals("wrong min", null, s.getMin());
        assertTrue("mean should be NaN", ((Double) s.getMean()).isNaN());
        assertEquals("wrong stddev", 0.0D, (Double) s.getStddev(), 0.0D);
        // things that we didn't ask for, so they better be null
        assertNull("expected null for count", s.getCount());
        assertNull("expected null for calcDistinct", s.getCountDistinct());
        assertNull("expected null for distinct vals", s.getDistinctValues());
        assertNull("expected null for max", s.getMax());
        assertNull("expected null for missing", s.getMissing());
        assertNull("expected null for sum", s.getSum());
        assertNull("expected null for percentiles", s.getPercentiles());
        assertNull("expected null for cardinality", s.getCardinality());
    }
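    // (Why the values above look odd, as a reading aid rather than original test commentary:
    //  with zero matching values, mean is computed as sum/count, i.e. 0.0/0 which is Double.NaN,
    //  and stddev degenerates to 0.0, while min stays null even though it was requested - hence
    //  the exact assertions in the block above.)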
    // look at stats on non numeric fields
    //
    // not all stats are supported on every field type, so some of these permutations will
    // result in no stats being computed, but this at least lets us sanity check that for each
    // of these field+stats(s) combinations we get consistent results between the distributed
    // request and the single node situation.
    //
    // NOTE: percentiles excluded because it doesn't support simple 'true/false' syntax
    // (and since it doesn't work for non-numerics anyway, we aren't missing any coverage here)
    EnumSet<Stat> allStats = EnumSet.complementOf(EnumSet.of(Stat.percentiles));
    int numTotalStatQueries = 0;
    // don't go overboard, just do all permutations of 1 or 2 stat params, for each field & query
    final int numStatParamsAtOnce = 2;
    for (int numParams = 1; numParams <= numStatParamsAtOnce; numParams++) {
        for (EnumSet<Stat> set : new StatSetCombinations(numParams, allStats)) {
            for (String field : new String[] { "foo_f", i1, tlong, tdate_a, oddField, "foo_sev_enum",
                    // fields that no doc has any value in
                    "bogus___s", "bogus___f", "bogus___i", "bogus___tdt", "bogus___sev_enum" }) {
                for (String q : new String[] {
                        // all docs
                        "*:*",
                        // no docs
                        "bogus___s:bogus",
                        // 0 or 1 doc...
                        "id:" + random().nextInt(50), "id:" + random().nextInt(50),
                        "id:" + random().nextInt(100), "id:" + random().nextInt(100),
                        "id:" + random().nextInt(200) }) {
                    // EnumSets use natural ordering; we want to randomize the order of the params
                    List<Stat> combo = new ArrayList<Stat>(set);
                    Collections.shuffle(combo, random());
                    StringBuilder paras = new StringBuilder("{!key=k ");
                    for (Stat stat : combo) {
                        paras.append(stat + "=true ");
                    }
                    paras.append("}").append(field);
                    numTotalStatQueries++;
                    rsp = query("q", q, "rows", "0", "stats", "true", "stats.field", paras.toString());
                    // simple assert, mostly relying on comparison with single shard
                    FieldStatsInfo s = rsp.getFieldStatsInfo().get("k");
                    assertNotNull(s);
                // TODO: if we had a programmatic way to determine what stats are supported
                // by what field types, we could make more confident asserts here.
                }
            }
        }
    }
    handle.remove("stddev");
    handle.remove("sumOfSquares");
    assertEquals("Sanity check failed: either test broke, or test changed, or you adjusted Stat enum" + " (adjust constant accordingly if intentional)", 5082, numTotalStatQueries);
    /*** TODO: the failure may come back in "exception"
    try {
      // test error produced for field that is invalid for schema
      query("q","*:*", "rows",100, "facet","true", "facet.field",invalidField, "facet.mincount",2);
      TestCase.fail("SolrServerException expected for invalid field that is not in schema");
    } catch (SolrServerException ex) {
      // expected
    }
    ***/
    // Try to get better coverage for refinement queries by turning off over requesting.
    // This makes it much more likely that we may not get the top facet values, and hence
    // we turn off that checking.
    handle.put("facet_fields", SKIPVAL);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.limit", 5, "facet.shard.limit", 5);
    // check a complex key name
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "{!key='$a b/c \\' \\} foo'}" + t1, "facet.limit", 5, "facet.shard.limit", 5);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "{!key='$a'}" + t1, "facet.limit", 5, "facet.shard.limit", 5);
    handle.remove("facet_fields");
    // Make sure there is no macro expansion for field values
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5, "expandMacros", "true");
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5, "expandMacros", "false");
    // Macro expansion should still work for the parameters
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "${foo}", "f.${foo}.mincount", 1, "foo", s1);
    query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "${foo}", "f.${foo}.mincount", 1, "foo", s1, "expandMacros", "true");
    // index the same document to multiple servers and make sure things don't blow up.
    if (clients.size() >= 2) {
        index(id, 100, i1, 107, t1, "oh no, a duplicate!");
        for (int i = 0; i < clients.size(); i++) {
            index_specific(i, id, 100, i1, 107, t1, "oh no, a duplicate!");
        }
        commit();
        query("q", "duplicate", "hl", "true", "hl.fl", t1);
        query("q", "fox duplicate horses", "hl", "true", "hl.fl", t1);
        query("q", "*:*", "rows", 100);
    }
    // Also see TestRemoteStreaming#testQtUpdateFails()
    try {
        ignoreException("isShard is only acceptable");
    // query("q","*:*","shards.qt","/update","stream.body","<delete><query>*:*</query></delete>");
    // fail();
    } catch (SolrException e) {
    //expected
    }
    unIgnoreException("isShard is only acceptable");
    // test debugging
    // handle.put("explain", UNORDERED);
    // internal docids differ, idf differs w/o global idf
    handle.put("explain", SKIPVAL);
    handle.put("debug", UNORDERED);
    handle.put("time", SKIPVAL);
    //track is not included in single node search
    handle.put("track", SKIP);
    query("q", "now their fox sat had put", "fl", "*,score", CommonParams.DEBUG_QUERY, "true");
    query("q", "id:[1 TO 5]", CommonParams.DEBUG_QUERY, "true");
    query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.TIMING);
    query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.RESULTS);
    query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.QUERY);
    // SOLR-6545, wild card field list
    indexr(id, "19", "text", "d", "cat_a_sS", "1", t1, "2");
    commit();
    rsp = query("q", "id:19", "fl", "id", "fl", "*a_sS");
    assertFieldValues(rsp.getResults(), "id", 19);
    rsp = query("q", "id:19", "fl", "id," + t1 + ",cat*");
    assertFieldValues(rsp.getResults(), "id", 19);
    // Check that shard info is added for each shard
    ModifiableSolrParams q = new ModifiableSolrParams();
    q.set("q", "*:*");
    q.set(ShardParams.SHARDS_INFO, true);
    setDistributedParams(q);
    rsp = queryServer(q);
    NamedList<?> sinfo = (NamedList<?>) rsp.getResponse().get(ShardParams.SHARDS_INFO);
    String shards = getShardsString();
    int cnt = StringUtils.countMatches(shards, ",") + 1;
    assertNotNull("missing shard info", sinfo);
    assertEquals("should have an entry for each shard [" + sinfo + "] " + shards, cnt, sinfo.size());
    // test shards.tolerant=true
    for (int numDownServers = 0; numDownServers < jettys.size() - 1; numDownServers++) {
        List<JettySolrRunner> upJettys = new ArrayList<>(jettys);
        List<SolrClient> upClients = new ArrayList<>(clients);
        List<JettySolrRunner> downJettys = new ArrayList<>();
        List<String> upShards = new ArrayList<>(Arrays.asList(shardsArr));
        for (int i = 0; i < numDownServers; i++) {
            // shut down some of the jettys
            int indexToRemove = r.nextInt(upJettys.size());
            JettySolrRunner downJetty = upJettys.remove(indexToRemove);
            upClients.remove(indexToRemove);
            upShards.remove(indexToRemove);
            ChaosMonkey.stop(downJetty);
            downJettys.add(downJetty);
        }
        queryPartialResults(upShards, upClients, "q", "*:*", "facet", "true", "facet.field", t1, "facet.field", t1, "facet.limit", 5, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true");
        queryPartialResults(upShards, upClients, "q", "*:*", "facet", "true", "facet.query", i1 + ":[1 TO 50]", "facet.query", i1 + ":[1 TO 50]", ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true");
        // test group query
        queryPartialResults(upShards, upClients, "q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc", CommonParams.TIME_ALLOWED, 1, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true");
        queryPartialResults(upShards, upClients, "q", "*:*", "stats", "true", "stats.field", i1, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true");
        queryPartialResults(upShards, upClients, "q", "toyata", "spellcheck", "true", "spellcheck.q", "toyata", "qt", "spellCheckCompRH_Direct", "shards.qt", "spellCheckCompRH_Direct", ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true");
        // restart the jettys
        for (JettySolrRunner downJetty : downJettys) {
            ChaosMonkey.start(downJetty);
        }
    }
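    // (Reading aid, not original commentary: shards.tolerant=true lets a distributed request
    //  return partial results instead of failing outright when some shards are down; combined
    //  with shards.info=true the response reports per-shard status, which is presumably what
    //  queryPartialResults() uses to verify the surviving shards.)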
    // This index has the same number for every field
    // TODO: This test currently fails because debug info is obtained only
    // on shards with matches.
    // query("q","matchesnothing","fl","*,score", "debugQuery", "true");
    // Thread.sleep(10000000000L);
    // delete all docs and test stats request
    del("*:*");
    commit();
    try {
        query("q", "*:*", "stats", "true", "stats.field", "stats_dt", "stats.field", i1, "stats.field", tdate_a, "stats.field", tdate_b, "stats.calcdistinct", "true");
    } catch (HttpSolrClient.RemoteSolrException e) {
        if (e.getMessage().startsWith("java.lang.NullPointerException")) {
            fail("NullPointerException with stats request on empty index");
        } else {
            throw e;
        }
    }
    String fieldName = "severity";
    indexr("id", "1", fieldName, "Not Available");
    indexr("id", "2", fieldName, "Low");
    indexr("id", "3", fieldName, "Medium");
    indexr("id", "4", fieldName, "High");
    indexr("id", "5", fieldName, "Critical");
    commit();
    rsp = query("q", "*:*", "stats", "true", "stats.field", fieldName);
    assertEquals(new EnumFieldValue(0, "Not Available"), rsp.getFieldStatsInfo().get(fieldName).getMin());
    query("q", "*:*", "stats", "true", "stats.field", fieldName, StatsParams.STATS_CALC_DISTINCT, "true");
    assertEquals(new EnumFieldValue(4, "Critical"), rsp.getFieldStatsInfo().get(fieldName).getMax());
    // this is stupid, but stats.facet doesn't guarantee order
    handle.put("severity", UNORDERED);
    query("q", "*:*", "stats", "true", "stats.field", fieldName, "stats.facet", fieldName);
}
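For readers who just want to exercise the FieldStatsInfo accessors outside the distributed test harness, here is a minimal stand-alone SolrJ sketch (not taken from the Lucene/Solr sources): the base URL "http://localhost:8983/solr/techproducts" and the "price" field are assumed example values, and only the stats enabled via the {!...} local params are expected to come back non-null, mirroring the assertNull checks above.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.QueryResponse;

public class FieldStatsInfoSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical core/collection URL and field name - adjust for a real instance.
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
            SolrQuery q = new SolrQuery("*:*");
            q.setRows(0);
            q.set("stats", "true");
            // Only the stats named in the local params are computed; the rest come back null.
            q.set("stats.field", "{!min=true max=true count=true missing=true}price");

            QueryResponse rsp = client.query(q);
            FieldStatsInfo stats = rsp.getFieldStatsInfo().get("price");
            System.out.println("min=" + stats.getMin()
                    + " max=" + stats.getMax()
                    + " count=" + stats.getCount()
                    + " missing=" + stats.getMissing());
        }
    }
}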
Also used :

RequestTrackingQueue (org.apache.solr.handler.component.TrackingShardHandlerFactory.RequestTrackingQueue)
ArrayList (java.util.ArrayList)
EnumFieldValue (org.apache.solr.common.EnumFieldValue)
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams)
ShardRequestAndParams (org.apache.solr.handler.component.TrackingShardHandlerFactory.ShardRequestAndParams)
ShardResponse (org.apache.solr.handler.component.ShardResponse)
HttpSolrClient (org.apache.solr.client.solrj.impl.HttpSolrClient)
Stat (org.apache.solr.handler.component.StatsField.Stat)
SolrClient (org.apache.solr.client.solrj.SolrClient)
Iterator (java.util.Iterator)
SolrException (org.apache.solr.common.SolrException)
StatSetCombinations (org.apache.solr.handler.component.StatsComponentTest.StatSetCombinations)
NamedList (org.apache.solr.common.util.NamedList)
JettySolrRunner (org.apache.solr.client.solrj.embedded.JettySolrRunner)
FieldStatsInfo (org.apache.solr.client.solrj.response.FieldStatsInfo)
QueryResponse (org.apache.solr.client.solrj.response.QueryResponse)
SolrQueryResponse (org.apache.solr.response.SolrQueryResponse)
SolrParams (org.apache.solr.common.params.SolrParams)
Map (java.util.Map)
Test (org.junit.Test)

Aggregations

FieldStatsInfo (org.apache.solr.client.solrj.response.FieldStatsInfo) - 12
QueryResponse (org.apache.solr.client.solrj.response.QueryResponse) - 9
PivotField (org.apache.solr.client.solrj.response.PivotField) - 8
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams) - 7
SolrParams (org.apache.solr.common.params.SolrParams) - 6
Test (org.junit.Test) - 4
HttpSolrClient (org.apache.solr.client.solrj.impl.HttpSolrClient) - 3
ArrayList (java.util.ArrayList) - 2
List (java.util.List) - 2
SolrClient (org.apache.solr.client.solrj.SolrClient) - 2
ErrorTrackingConcurrentUpdateSolrClient (org.apache.solr.client.solrj.embedded.SolrExampleStreamingTest.ErrorTrackingConcurrentUpdateSolrClient) - 2
SolrInputDocument (org.apache.solr.common.SolrInputDocument) - 2
NamedList (org.apache.solr.common.util.NamedList) - 2
StringContains.containsString (org.junit.internal.matchers.StringContains.containsString) - 2
Iterator (java.util.Iterator) - 1
Map (java.util.Map) - 1
JettySolrRunner (org.apache.solr.client.solrj.embedded.JettySolrRunner) - 1
EnumFieldValue (org.apache.solr.common.EnumFieldValue) - 1
SolrDocumentList (org.apache.solr.common.SolrDocumentList) - 1
SolrException (org.apache.solr.common.SolrException) - 1