
Example 21 with SyntaxError

Use of org.apache.solr.search.SyntaxError in project lucene-solr by apache.

The class LegacyFacet, method parseParams.

protected void parseParams(String type, String param) {
    facetValue = param;
    key = param;
    try {
        localParams = QueryParsing.getLocalParams(param, orig);
        if (localParams == null) {
            params = orig;
            required = new RequiredSolrParams(params);
            // setupStats();
            return;
        }
        params = SolrParams.wrapDefaults(localParams, orig);
        required = new RequiredSolrParams(params);
        // remove local params unless it's a query
        if (type != FacetParams.FACET_QUERY) {
            facetValue = localParams.get(CommonParams.VALUE);
        }
        // reset the default key now that localParams have been removed
        key = facetValue;
        // allow explicit set of the key
        key = localParams.get(CommonParams.OUTPUT_KEY, key);
    // setupStats();
    } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }
}
Also used : RequiredSolrParams(org.apache.solr.common.params.RequiredSolrParams) SyntaxError(org.apache.solr.search.SyntaxError) SolrException(org.apache.solr.common.SolrException)
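
The example above folds local-param parsing and SyntaxError handling into one method. The sketch below isolates that idiom: parse the optional {!...} prefix of a facet parameter, overlay it on the original request params, and report a malformed prefix as a 400. It only uses QueryParsing.getLocalParams, SolrParams.wrapDefaults and SolrException, which the example itself relies on; the class and method names are invented for illustration.

import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SyntaxError;

public class LocalParamsSketch {

    // Overlay any {!...} local params found in 'param' on top of the original
    // request params; surface malformed local params as a client error (HTTP 400).
    static SolrParams resolve(String param, SolrParams orig) {
        try {
            SolrParams local = QueryParsing.getLocalParams(param, orig);
            return (local == null) ? orig : SolrParams.wrapDefaults(local, orig);
        } catch (SyntaxError e) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }
    }

    public static void main(String[] args) {
        ModifiableSolrParams orig = new ModifiableSolrParams();
        orig.set("facet.field", "cat");
        SolrParams resolved = resolve("{!key=categories}cat", orig);
        // The explicit key from the local params wins, as in LegacyFacet.parseParams.
        System.out.println(resolved.get("key"));
    }
}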

Example 22 with SyntaxError

Use of org.apache.solr.search.SyntaxError in project lucene-solr by apache.

The class PivotFacetProcessor, method addPivotQueriesAndRanges.

/**
   * Add facet.queries and facet.ranges to the pivot response if needed
   * 
   * @param pivot
   *          Pivot in which to inject additional data
   * @param params
   *          Query parameters.
   * @param docs
   *          DocSet of the current pivot to use for computing sub-counts
   * @param facetQueries
   *          Tagged facet queries that should be included; must not be null
   * @param facetRanges
   *          Tagged facet ranges that should be included; must not be null
   * @throws IOException
   *           If searcher has issues finding numDocs.
   */
protected void addPivotQueriesAndRanges(NamedList<Object> pivot, SolrParams params, DocSet docs, List<FacetComponent.FacetBase> facetQueries, List<RangeFacetRequest> facetRanges) throws IOException {
    assert null != facetQueries;
    assert null != facetRanges;
    if (!facetQueries.isEmpty()) {
        SimpleFacets facets = new SimpleFacets(req, docs, params);
        NamedList<Integer> res = new SimpleOrderedMap<>();
        for (FacetComponent.FacetBase facetQuery : facetQueries) {
            try {
                ParsedParams parsed = getParsedParams(params, docs, facetQuery);
                facets.getFacetQueryCount(parsed, res);
            } catch (SyntaxError e) {
                throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid " + FacetParams.FACET_QUERY + " (" + facetQuery.facetStr + ") cause: " + e.getMessage(), e);
            }
        }
        pivot.add(PivotListEntry.QUERIES.getName(), res);
    }
    if (!facetRanges.isEmpty()) {
        RangeFacetProcessor rangeFacetProcessor = new RangeFacetProcessor(req, docs, params, null);
        NamedList<Object> resOuter = new SimpleOrderedMap<>();
        for (RangeFacetRequest rangeFacet : facetRanges) {
            try {
                rangeFacetProcessor.getFacetRangeCounts(rangeFacet, resOuter);
            } catch (SyntaxError e) {
                throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid " + FacetParams.FACET_RANGE + " (" + rangeFacet.facetStr + ") cause: " + e.getMessage(), e);
            }
        }
        pivot.add(PivotListEntry.RANGES.getName(), resOuter);
    }
}
Also used : SimpleFacets(org.apache.solr.request.SimpleFacets) SimpleOrderedMap(org.apache.solr.common.util.SimpleOrderedMap) SyntaxError(org.apache.solr.search.SyntaxError) SolrException(org.apache.solr.common.SolrException)
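
addPivotQueriesAndRanges only has work to do when the request tags facet.query and facet.range parameters and references those tags from the pivot's local params. The sketch below builds such a request with ModifiableSolrParams; the field names (price, cat, inStock, manufacturedate_dt) come from Solr's example schema and are illustrative only.

import org.apache.solr.common.params.ModifiableSolrParams;

public class PivotTagParamsSketch {

    public static void main(String[] args) {
        ModifiableSolrParams p = new ModifiableSolrParams();
        p.set("q", "*:*");
        p.set("facet", "true");
        // Tag a facet.query and a facet.range so the pivot can pull them in per pivot value.
        p.add("facet.query", "{!tag=q1}price:[0 TO 100]");
        p.add("facet.range", "{!tag=r1}manufacturedate_dt");
        p.add("f.manufacturedate_dt.facet.range.start", "2006-01-01T00:00:00Z");
        p.add("f.manufacturedate_dt.facet.range.end", "NOW");
        p.add("f.manufacturedate_dt.facet.range.gap", "+1YEAR");
        // The query/range local params are what addPivotQueriesAndRanges resolves
        // into facetQueries and facetRanges for each pivot bucket.
        p.add("facet.pivot", "{!query=q1 range=r1}cat,inStock");
        System.out.println(p);
    }
}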

Example 23 with SyntaxError

Use of org.apache.solr.search.SyntaxError in project lucene-solr by apache.

The class QueryComponent, method process.

/**
   * Actually run the query
   */
@Override
public void process(ResponseBuilder rb) throws IOException {
    LOG.debug("process: {}", rb.req.getParams());
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
        return;
    }
    SolrIndexSearcher searcher = req.getSearcher();
    StatsCache statsCache = req.getCore().getStatsCache();
    int purpose = params.getInt(ShardParams.SHARDS_PURPOSE, ShardRequest.PURPOSE_GET_TOP_IDS);
    if ((purpose & ShardRequest.PURPOSE_GET_TERM_STATS) != 0) {
        statsCache.returnLocalStats(rb, searcher);
        return;
    }
    // check if we need to update the local copy of global dfs
    if ((purpose & ShardRequest.PURPOSE_SET_TERM_STATS) != 0) {
        // retrieve from request and update local cache
        statsCache.receiveGlobalStats(req);
    }
    SolrQueryResponse rsp = rb.rsp;
    IndexSchema schema = searcher.getSchema();
    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
        SchemaField idField = schema.getUniqueKeyField();
        List<String> idArr = StrUtils.splitSmart(ids, ",", true);
        int[] luceneIds = new int[idArr.size()];
        int docs = 0;
        if (idField.getType().isPointField()) {
            for (int i = 0; i < idArr.size(); i++) {
                int id = searcher.search(idField.getType().getFieldQuery(null, idField, idArr.get(i)), 1).scoreDocs[0].doc;
                if (id >= 0) {
                    luceneIds[docs++] = id;
                }
            }
        } else {
            for (int i = 0; i < idArr.size(); i++) {
                int id = searcher.getFirstMatch(new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
                if (id >= 0)
                    luceneIds[docs++] = id;
            }
        }
        DocListAndSet res = new DocListAndSet();
        res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
        if (rb.isNeedDocSet()) {
            // TODO: create a cache for this!
            List<Query> queries = new ArrayList<>();
            queries.add(rb.getQuery());
            List<Query> filters = rb.getFilters();
            if (filters != null)
                queries.addAll(filters);
            res.docSet = searcher.getDocSet(queries);
        }
        rb.setResults(res);
        ResultContext ctx = new BasicResultContext(rb);
        rsp.addResponse(ctx);
        return;
    }
    // -1 as flag if not set.
    long timeAllowed = params.getLong(CommonParams.TIME_ALLOWED, -1L);
    if (null != rb.getCursorMark() && 0 < timeAllowed) {
        // fundamentally incompatible
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can not search using both " + CursorMarkParams.CURSOR_MARK_PARAM + " and " + CommonParams.TIME_ALLOWED);
    }
    QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req));
    QueryResult result = new QueryResult();
    cmd.setSegmentTerminateEarly(params.getBool(CommonParams.SEGMENT_TERMINATE_EARLY, CommonParams.SEGMENT_TERMINATE_EARLY_DEFAULT));
    if (cmd.getSegmentTerminateEarly()) {
        result.setSegmentTerminatedEarly(Boolean.FALSE);
    }
    //
    // grouping / field collapsing
    //
    GroupingSpecification groupingSpec = rb.getGroupingSpec();
    if (groupingSpec != null) {
        // not supported, silently ignore any segmentTerminateEarly flag
        cmd.setSegmentTerminateEarly(false);
        try {
            boolean needScores = (cmd.getFlags() & SolrIndexSearcher.GET_SCORES) != 0;
            if (params.getBool(GroupParams.GROUP_DISTRIBUTED_FIRST, false)) {
                // Order matters here
                CommandHandler.Builder topsGroupsActionBuilder = new CommandHandler.Builder().setQueryCommand(cmd).setNeedDocSet(false).setIncludeHitCount(true).setSearcher(searcher);
                for (String field : groupingSpec.getFields()) {
                    topsGroupsActionBuilder.addCommandField(new SearchGroupsFieldCommand.Builder().setField(schema.getField(field)).setGroupSort(groupingSpec.getGroupSort()).setTopNGroups(cmd.getOffset() + cmd.getLen()).setIncludeGroupCount(groupingSpec.isIncludeGroupCount()).build());
                }
                CommandHandler commandHandler = topsGroupsActionBuilder.build();
                commandHandler.execute();
                SearchGroupsResultTransformer serializer = new SearchGroupsResultTransformer(searcher);
                rsp.add("firstPhase", commandHandler.processResult(result, serializer));
                rsp.add("totalHitCount", commandHandler.getTotalHitCount());
                rb.setResult(result);
                return;
            } else if (params.getBool(GroupParams.GROUP_DISTRIBUTED_SECOND, false)) {
                CommandHandler.Builder secondPhaseBuilder = new CommandHandler.Builder().setQueryCommand(cmd).setTruncateGroups(groupingSpec.isTruncateGroups() && groupingSpec.getFields().length > 0).setSearcher(searcher);
                int docsToCollect = Grouping.getMax(groupingSpec.getWithinGroupOffset(), groupingSpec.getWithinGroupLimit(), searcher.maxDoc());
                docsToCollect = Math.max(docsToCollect, 1);
                for (String field : groupingSpec.getFields()) {
                    SchemaField schemaField = schema.getField(field);
                    String[] topGroupsParam = params.getParams(GroupParams.GROUP_DISTRIBUTED_TOPGROUPS_PREFIX + field);
                    if (topGroupsParam == null) {
                        topGroupsParam = new String[0];
                    }
                    List<SearchGroup<BytesRef>> topGroups = new ArrayList<>(topGroupsParam.length);
                    for (String topGroup : topGroupsParam) {
                        SearchGroup<BytesRef> searchGroup = new SearchGroup<>();
                        if (!topGroup.equals(TopGroupsShardRequestFactory.GROUP_NULL_VALUE)) {
                            BytesRefBuilder builder = new BytesRefBuilder();
                            schemaField.getType().readableToIndexed(topGroup, builder);
                            searchGroup.groupValue = builder.get();
                        }
                        topGroups.add(searchGroup);
                    }
                    secondPhaseBuilder.addCommandField(new TopGroupsFieldCommand.Builder().setField(schemaField).setGroupSort(groupingSpec.getGroupSort()).setSortWithinGroup(groupingSpec.getSortWithinGroup()).setFirstPhaseGroups(topGroups).setMaxDocPerGroup(docsToCollect).setNeedScores(needScores).setNeedMaxScore(needScores).build());
                }
                for (String query : groupingSpec.getQueries()) {
                    secondPhaseBuilder.addCommandField(new Builder().setDocsToCollect(docsToCollect).setSort(groupingSpec.getGroupSort()).setQuery(query, rb.req).setDocSet(searcher).build());
                }
                CommandHandler commandHandler = secondPhaseBuilder.build();
                commandHandler.execute();
                TopGroupsResultTransformer serializer = new TopGroupsResultTransformer(rb);
                rsp.add("secondPhase", commandHandler.processResult(result, serializer));
                rb.setResult(result);
                return;
            }
            int maxDocsPercentageToCache = params.getInt(GroupParams.GROUP_CACHE_PERCENTAGE, 0);
            boolean cacheSecondPassSearch = maxDocsPercentageToCache >= 1 && maxDocsPercentageToCache <= 100;
            Grouping.TotalCount defaultTotalCount = groupingSpec.isIncludeGroupCount() ? Grouping.TotalCount.grouped : Grouping.TotalCount.ungrouped;
            // this is normally from "rows"
            int limitDefault = cmd.getLen();
            Grouping grouping = new Grouping(searcher, result, cmd, cacheSecondPassSearch, maxDocsPercentageToCache, groupingSpec.isMain());
            grouping.setGroupSort(groupingSpec.getGroupSort()).setWithinGroupSort(groupingSpec.getSortWithinGroup()).setDefaultFormat(groupingSpec.getResponseFormat()).setLimitDefault(limitDefault).setDefaultTotalCount(defaultTotalCount).setDocsPerGroupDefault(groupingSpec.getWithinGroupLimit()).setGroupOffsetDefault(groupingSpec.getWithinGroupOffset()).setGetGroupedDocSet(groupingSpec.isTruncateGroups());
            if (groupingSpec.getFields() != null) {
                for (String field : groupingSpec.getFields()) {
                    grouping.addFieldCommand(field, rb.req);
                }
            }
            if (groupingSpec.getFunctions() != null) {
                for (String groupByStr : groupingSpec.getFunctions()) {
                    grouping.addFunctionCommand(groupByStr, rb.req);
                }
            }
            if (groupingSpec.getQueries() != null) {
                for (String groupByStr : groupingSpec.getQueries()) {
                    grouping.addQueryCommand(groupByStr, rb.req);
                }
            }
            if (rb.isNeedDocList() || rb.isDebug()) {
                // we need a single list of the returned docs
                cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
            }
            grouping.execute();
            if (grouping.isSignalCacheWarning()) {
                rsp.add("cacheWarning", String.format(Locale.ROOT, "Cache limit of %d percent relative to maxdoc has exceeded. Please increase cache size or disable caching.", maxDocsPercentageToCache));
            }
            rb.setResult(result);
            if (grouping.mainResult != null) {
                ResultContext ctx = new BasicResultContext(rb, grouping.mainResult);
                rsp.addResponse(ctx);
                rsp.getToLog().add("hits", grouping.mainResult.matches());
            } else if (!grouping.getCommands().isEmpty()) {
                // Can never be empty since grouping.execute() checks for this.
                rsp.add("grouped", result.groupedResults);
                rsp.getToLog().add("hits", grouping.getCommands().get(0).getMatches());
            }
            return;
        } catch (SyntaxError e) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }
    }
    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);
    ResultContext ctx = new BasicResultContext(rb);
    rsp.addResponse(ctx);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());
    if (!rb.req.getParams().getBool(ShardParams.IS_SHARD, false)) {
        if (null != rb.getNextCursorMark()) {
            rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT, rb.getNextCursorMark().getSerializedTotem());
        }
    }
    if (rb.mergeFieldHandler != null) {
        rb.mergeFieldHandler.handleMergeFields(rb, searcher);
    } else {
        doFieldSortValues(rb, searcher);
    }
    doPrefetch(rb);
}
Also used : BasicResultContext(org.apache.solr.response.BasicResultContext) ResultContext(org.apache.solr.response.ResultContext) Query(org.apache.lucene.search.Query) MatchNoDocsQuery(org.apache.lucene.search.MatchNoDocsQuery) RankQuery(org.apache.solr.search.RankQuery) DocListAndSet(org.apache.solr.search.DocListAndSet) BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) Builder(org.apache.solr.search.grouping.distributed.command.QueryCommand.Builder) ArrayList(java.util.ArrayList) CommandHandler(org.apache.solr.search.grouping.CommandHandler) DocSlice(org.apache.solr.search.DocSlice) BasicResultContext(org.apache.solr.response.BasicResultContext) StatsCache(org.apache.solr.search.stats.StatsCache) QueryResult(org.apache.solr.search.QueryResult) SyntaxError(org.apache.solr.search.SyntaxError) SolrDocumentList(org.apache.solr.common.SolrDocumentList) DocList(org.apache.solr.search.DocList) List(java.util.List) ArrayList(java.util.ArrayList) NamedList(org.apache.solr.common.util.NamedList) GroupingSpecification(org.apache.solr.search.grouping.GroupingSpecification) SearchGroupsResultTransformer(org.apache.solr.search.grouping.distributed.shardresultserializer.SearchGroupsResultTransformer) SolrException(org.apache.solr.common.SolrException) BytesRef(org.apache.lucene.util.BytesRef) SolrQueryResponse(org.apache.solr.response.SolrQueryResponse) BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) SearchGroup(org.apache.lucene.search.grouping.SearchGroup) Grouping(org.apache.solr.search.Grouping) SolrIndexSearcher(org.apache.solr.search.SolrIndexSearcher) Term(org.apache.lucene.index.Term) TopGroupsResultTransformer(org.apache.solr.search.grouping.distributed.shardresultserializer.TopGroupsResultTransformer) SchemaField(org.apache.solr.schema.SchemaField) SolrQueryRequest(org.apache.solr.request.SolrQueryRequest) SolrParams(org.apache.solr.common.params.SolrParams) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) QueryCommand(org.apache.solr.search.QueryCommand) IndexSchema(org.apache.solr.schema.IndexSchema)
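
One small but easy-to-miss check in process() is that cursorMark and timeAllowed are rejected when used together. A minimal client-side sketch using SolrJ's SolrQuery (an assumption; the example itself is server-side code) shows the combination that would trigger the BAD_REQUEST above.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CursorMarkParams;

public class CursorMarkSketch {

    public static void main(String[] args) {
        SolrQuery q = new SolrQuery("*:*");
        // Cursor paging requires a sort that includes the uniqueKey field as a tie-break.
        q.setSort(SolrQuery.SortClause.asc("id"));
        q.set(CursorMarkParams.CURSOR_MARK_PARAM, CursorMarkParams.CURSOR_MARK_START);
        // Uncommenting the next line would make QueryComponent.process reject the
        // request with a 400, since cursorMark and timeAllowed are incompatible.
        // q.setTimeAllowed(1000);
        System.out.println(q);
    }
}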

Example 24 with SyntaxError

Use of org.apache.solr.search.SyntaxError in project lucene-solr by apache.

The class PivotFacetProcessor, method process.

/**
   * Processes all of the specified {@link FacetParams#FACET_PIVOT} strings, generating 
   * a complete response tree for each pivot.  The values in this response will either
   * be the complete tree of fields and values for the specified pivot in the local index, 
   * or the requested refinements if the pivot params include the {@link PivotFacet#REFINE_PARAM}
   */
public SimpleOrderedMap<List<NamedList<Object>>> process(String[] pivots) throws IOException {
    if (!rb.doFacets || pivots == null)
        return null;
    // rb._statsInfo may be null if stats=false, ie: refine requests
    // if that's the case, but we need to refine w/stats, then we'll lazy init our 
    // own instance of StatsInfo
    StatsInfo statsInfo = rb._statsInfo;
    SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<>();
    for (String pivotList : pivots) {
        final ParsedParams parsed;
        try {
            parsed = this.parseParams(FacetParams.FACET_PIVOT, pivotList);
        } catch (SyntaxError e) {
            throw new SolrException(ErrorCode.BAD_REQUEST, e);
        }
        List<String> pivotFields = StrUtils.splitSmart(parsed.facetValue, ",", true);
        if (pivotFields.size() < 1) {
            throw new SolrException(ErrorCode.BAD_REQUEST, "Pivot Facet needs at least one field name: " + pivotList);
        } else {
            SolrIndexSearcher searcher = rb.req.getSearcher();
            for (String fieldName : pivotFields) {
                SchemaField sfield = searcher.getSchema().getField(fieldName);
                if (sfield == null) {
                    throw new SolrException(ErrorCode.BAD_REQUEST, "\"" + fieldName + "\" is not a valid field name in pivot: " + pivotList);
                }
            }
        }
        // start by assuming no local params...
        // no local => no refinement
        String refineKey = null;
        // no local => no stats
        List<StatsField> statsFields = Collections.emptyList();
        List<FacetComponent.FacetBase> facetQueries = Collections.emptyList();
        List<RangeFacetRequest> facetRanges = Collections.emptyList();
        if (null != parsed.localParams) {
            // we might be refining..
            refineKey = parsed.localParams.get(PivotFacet.REFINE_PARAM);
            String statsLocalParam = parsed.localParams.get(StatsParams.STATS);
            if (null != refineKey && null != statsLocalParam && null == statsInfo) {
                // we are refining and need to compute stats, 
                // but stats component hasn't inited StatsInfo (because we
                // don't need/want top level stats when refining) so we lazy init
                // our own copy of StatsInfo
                statsInfo = new StatsInfo(rb);
            }
            statsFields = getTaggedStatsFields(statsInfo, statsLocalParam);
            try {
                FacetComponent.FacetContext facetContext = FacetComponent.FacetContext.getFacetContext(req);
                String taggedQueries = parsed.localParams.get(QUERY);
                if (StringUtils.isEmpty(taggedQueries)) {
                    facetQueries = Collections.emptyList();
                } else {
                    List<String> localParamValue = StrUtils.splitSmart(taggedQueries, ',');
                    if (localParamValue.size() > 1) {
                        String msg = QUERY + " local param of " + FacetParams.FACET_PIVOT + " may not include tags separated by a comma - please use a common tag on all " + FacetParams.FACET_QUERY + " params you wish to compute under this pivot";
                        throw new SolrException(ErrorCode.BAD_REQUEST, msg);
                    }
                    taggedQueries = localParamValue.get(0);
                    facetQueries = facetContext.getQueryFacetsForTag(taggedQueries);
                }
                String taggedRanges = parsed.localParams.get(RANGE);
                if (StringUtils.isEmpty(taggedRanges)) {
                    facetRanges = Collections.emptyList();
                } else {
                    List<String> localParamValue = StrUtils.splitSmart(taggedRanges, ',');
                    if (localParamValue.size() > 1) {
                        String msg = RANGE + " local param of " + FacetParams.FACET_PIVOT + " may not include tags separated by a comma - please use a common tag on all " + FacetParams.FACET_RANGE + " params you wish to compute under this pivot";
                        throw new SolrException(ErrorCode.BAD_REQUEST, msg);
                    }
                    taggedRanges = localParamValue.get(0);
                    facetRanges = facetContext.getRangeFacetRequestsForTag(taggedRanges);
                }
            } catch (IllegalStateException e) {
                throw new SolrException(ErrorCode.SERVER_ERROR, "Faceting context not set, cannot calculate pivot values");
            }
        }
        if (null != refineKey) {
            String[] refinementValuesByField = params.getParams(PivotFacet.REFINE_PARAM + refineKey);
            for (String refinements : refinementValuesByField) {
                pivotResponse.addAll(processSingle(pivotFields, refinements, statsFields, parsed, facetQueries, facetRanges));
            }
        } else {
            pivotResponse.addAll(processSingle(pivotFields, null, statsFields, parsed, facetQueries, facetRanges));
        }
    }
    return pivotResponse;
}
Also used : SolrIndexSearcher(org.apache.solr.search.SolrIndexSearcher) SimpleOrderedMap(org.apache.solr.common.util.SimpleOrderedMap) SchemaField(org.apache.solr.schema.SchemaField) SyntaxError(org.apache.solr.search.SyntaxError) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) NamedList(org.apache.solr.common.util.NamedList) List(java.util.List) SolrException(org.apache.solr.common.SolrException)
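
The error messages above spell out a restriction: the query and range local params of facet.pivot accept a single tag, not a comma-separated list. A hedged sketch of the accepted form, reusing the illustrative field names from the earlier sketch, gives several facet.query params one common tag and references only that tag from the pivot.

import org.apache.solr.common.params.ModifiableSolrParams;

public class CommonTagSketch {

    public static void main(String[] args) {
        ModifiableSolrParams p = new ModifiableSolrParams();
        p.set("q", "*:*");
        p.set("facet", "true");
        // Not "{!query=q1,q2}...": multiple tags in one local param are rejected above.
        // Instead, give every facet.query the same tag and reference that single tag.
        p.add("facet.query", "{!tag=q1}price:[0 TO 100]");
        p.add("facet.query", "{!tag=q1}price:[100 TO *]");
        p.add("facet.pivot", "{!query=q1}cat,inStock");
        System.out.println(p);
    }
}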

Example 25 with SyntaxError

Use of org.apache.solr.search.SyntaxError in project lucene-solr by apache.

The class FacetComponent, method getFacetCounts.

/**
   * Looks at various Params to determine if any simple Facet Constraint count
   * computations are desired.
   *
   * @see SimpleFacets#getFacetQueryCounts
   * @see SimpleFacets#getFacetFieldCounts
   * @see RangeFacetProcessor#getFacetRangeCounts
   * @see RangeFacetProcessor#getFacetIntervalCounts
   * @see FacetParams#FACET
   * @return a NamedList of Facet Count info or null
   */
public static NamedList<Object> getFacetCounts(SimpleFacets simpleFacets, FacetDebugInfo fdebug) {
    // if someone called this method, benefit of the doubt: assume true
    if (!simpleFacets.getGlobalParams().getBool(FacetParams.FACET, true))
        return null;
    RangeFacetProcessor rangeFacetProcessor = new RangeFacetProcessor(simpleFacets.getRequest(), simpleFacets.getDocsOrig(), simpleFacets.getGlobalParams(), simpleFacets.getResponseBuilder());
    NamedList<Object> counts = new SimpleOrderedMap<>();
    try {
        counts.add(FACET_QUERY_KEY, simpleFacets.getFacetQueryCounts());
        if (fdebug != null) {
            FacetDebugInfo fd = new FacetDebugInfo();
            fd.putInfoItem("action", "field facet");
            fd.setProcessor(simpleFacets.getClass().getSimpleName());
            fdebug.addChild(fd);
            simpleFacets.setFacetDebugInfo(fd);
            final RTimer timer = new RTimer();
            counts.add(FACET_FIELD_KEY, simpleFacets.getFacetFieldCounts());
            long timeElapsed = (long) timer.getTime();
            fd.setElapse(timeElapsed);
        } else {
            counts.add(FACET_FIELD_KEY, simpleFacets.getFacetFieldCounts());
        }
        counts.add(FACET_RANGES_KEY, rangeFacetProcessor.getFacetRangeCounts());
        counts.add(FACET_INTERVALS_KEY, simpleFacets.getFacetIntervalCounts());
        counts.add(SpatialHeatmapFacets.RESPONSE_KEY, simpleFacets.getHeatmapCounts());
    } catch (IOException e) {
        throw new SolrException(ErrorCode.SERVER_ERROR, e);
    } catch (SyntaxError e) {
        throw new SolrException(ErrorCode.BAD_REQUEST, e);
    }
    return counts;
}
Also used : FacetDebugInfo(org.apache.solr.search.facet.FacetDebugInfo) SyntaxError(org.apache.solr.search.SyntaxError) IOException(java.io.IOException) SimpleOrderedMap(org.apache.solr.common.util.SimpleOrderedMap) RTimer(org.apache.solr.util.RTimer) SolrException(org.apache.solr.common.SolrException)
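
getFacetCounts maps its two checked exceptions to different HTTP codes: I/O problems become 500s, query-syntax problems become 400s. The sketch below extracts just that mapping; the FacetWork interface, the compute method and the class name are invented for illustration.

import java.io.IOException;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.search.SyntaxError;

public class ErrorMappingSketch {

    interface FacetWork {
        Object run() throws IOException, SyntaxError;
    }

    // Mirror the catch blocks in getFacetCounts: server-side I/O failures map to
    // 500 SERVER_ERROR, while user-supplied query syntax problems map to 400 BAD_REQUEST.
    static Object compute(FacetWork work) {
        try {
            return work.run();
        } catch (IOException e) {
            throw new SolrException(ErrorCode.SERVER_ERROR, e);
        } catch (SyntaxError e) {
            throw new SolrException(ErrorCode.BAD_REQUEST, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(compute(() -> "ok"));
    }
}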

Aggregations

SyntaxError (org.apache.solr.search.SyntaxError): 35
SolrException (org.apache.solr.common.SolrException): 23
Query (org.apache.lucene.search.Query): 21
QParser (org.apache.solr.search.QParser): 12
ArrayList (java.util.ArrayList): 10
SchemaField (org.apache.solr.schema.SchemaField): 10
BooleanQuery (org.apache.lucene.search.BooleanQuery): 8
SolrParams (org.apache.solr.common.params.SolrParams): 8
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams): 6
SolrIndexSearcher (org.apache.solr.search.SolrIndexSearcher): 6
IOException (java.io.IOException): 5
List (java.util.List): 5
NamedList (org.apache.solr.common.util.NamedList): 5
SimpleOrderedMap (org.apache.solr.common.util.SimpleOrderedMap): 5
Map (java.util.Map): 4
SolrQueryRequest (org.apache.solr.request.SolrQueryRequest): 4
SolrQueryResponse (org.apache.solr.response.SolrQueryResponse): 4
FieldType (org.apache.solr.schema.FieldType): 4
IndexSchema (org.apache.solr.schema.IndexSchema): 4
DocList (org.apache.solr.search.DocList): 4