
Example 1 with BoostQueryNode

Use of org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode in the lucene-solr project (by apache), in the class StandardSyntaxParser, method Term.

public final QueryNode Term(CharSequence field) throws ParseException {
    Token term, boost = null, fuzzySlop = null, goop1, goop2;
    boolean fuzzy = false;
    boolean regexp = false;
    boolean startInc = false;
    boolean endInc = false;
    QueryNode q = null;
    FieldQueryNode qLower, qUpper;
    float defaultMinSimilarity = org.apache.lucene.search.FuzzyQuery.defaultMinSimilarity;
    switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case TERM:
        case REGEXPTERM:
        case NUMBER:
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case TERM:
                    term = jj_consume_token(TERM);
                    q = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                    break;
                case REGEXPTERM:
                    term = jj_consume_token(REGEXPTERM);
                    regexp = true;
                    break;
                case NUMBER:
                    term = jj_consume_token(NUMBER);
                    break;
                default:
                    jj_la1[13] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case FUZZY_SLOP:
                    fuzzySlop = jj_consume_token(FUZZY_SLOP);
                    fuzzy = true;
                    break;
                default:
                    jj_la1[14] = jj_gen;
                    ;
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case CARAT:
                    jj_consume_token(CARAT);
                    boost = jj_consume_token(NUMBER);
                    switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                        case FUZZY_SLOP:
                            fuzzySlop = jj_consume_token(FUZZY_SLOP);
                            fuzzy = true;
                            break;
                        default:
                            jj_la1[15] = jj_gen;
                            ;
                    }
                    break;
                default:
                    jj_la1[16] = jj_gen;
                    ;
            }
            if (fuzzy) {
                float fms = defaultMinSimilarity;
                try {
                    fms = Float.parseFloat(fuzzySlop.image.substring(1));
                } catch (Exception ignored) {
                }
                if (fms < 0.0f) {
                    {
                        if (true)
                            throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS));
                    }
                } else if (fms >= 1.0f && fms != (int) fms) {
                    {
                        if (true)
                            throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_EDITS));
                    }
                }
                q = new FuzzyQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), fms, term.beginColumn, term.endColumn);
            } else if (regexp) {
                String re = term.image.substring(1, term.image.length() - 1);
                q = new RegexpQueryNode(field, re, 0, re.length());
            }
            break;
        case RANGEIN_START:
        case RANGEEX_START:
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case RANGEIN_START:
                    jj_consume_token(RANGEIN_START);
                    startInc = true;
                    break;
                case RANGEEX_START:
                    jj_consume_token(RANGEEX_START);
                    break;
                default:
                    jj_la1[17] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case RANGE_GOOP:
                    goop1 = jj_consume_token(RANGE_GOOP);
                    break;
                case RANGE_QUOTED:
                    goop1 = jj_consume_token(RANGE_QUOTED);
                    break;
                case RANGE_TO:
                    goop1 = jj_consume_token(RANGE_TO);
                    break;
                default:
                    jj_la1[18] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            jj_consume_token(RANGE_TO);
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case RANGE_GOOP:
                    goop2 = jj_consume_token(RANGE_GOOP);
                    break;
                case RANGE_QUOTED:
                    goop2 = jj_consume_token(RANGE_QUOTED);
                    break;
                case RANGE_TO:
                    goop2 = jj_consume_token(RANGE_TO);
                    break;
                default:
                    jj_la1[19] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case RANGEIN_END:
                    jj_consume_token(RANGEIN_END);
                    endInc = true;
                    break;
                case RANGEEX_END:
                    jj_consume_token(RANGEEX_END);
                    break;
                default:
                    jj_la1[20] = jj_gen;
                    jj_consume_token(-1);
                    throw new ParseException();
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case CARAT:
                    jj_consume_token(CARAT);
                    boost = jj_consume_token(NUMBER);
                    break;
                default:
                    jj_la1[21] = jj_gen;
                    ;
            }
            if (goop1.kind == RANGE_QUOTED) {
                goop1.image = goop1.image.substring(1, goop1.image.length() - 1);
            }
            if (goop2.kind == RANGE_QUOTED) {
                goop2.image = goop2.image.substring(1, goop2.image.length() - 1);
            }
            qLower = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(goop1.image), goop1.beginColumn, goop1.endColumn);
            qUpper = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(goop2.image), goop2.beginColumn, goop2.endColumn);
            q = new TermRangeQueryNode(qLower, qUpper, startInc ? true : false, endInc ? true : false);
            break;
        case QUOTED:
            term = jj_consume_token(QUOTED);
            q = new QuotedFieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image.substring(1, term.image.length() - 1)), term.beginColumn + 1, term.endColumn - 1);
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case FUZZY_SLOP:
                    fuzzySlop = jj_consume_token(FUZZY_SLOP);
                    break;
                default:
                    jj_la1[22] = jj_gen;
                    ;
            }
            switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case CARAT:
                    jj_consume_token(CARAT);
                    boost = jj_consume_token(NUMBER);
                    break;
                default:
                    jj_la1[23] = jj_gen;
                    ;
            }
            int phraseSlop = 0;
            if (fuzzySlop != null) {
                try {
                    phraseSlop = (int) Float.parseFloat(fuzzySlop.image.substring(1));
                    q = new SlopQueryNode(q, phraseSlop);
                } catch (Exception ignored) {
                /* Should this be handled somehow? (defaults to "no PhraseSlop",
                 * if slop number is invalid)
                 */
                }
            }
            break;
        default:
            jj_la1[24] = jj_gen;
            jj_consume_token(-1);
            throw new ParseException();
    }
    if (boost != null) {
        float f = (float) 1.0;
        try {
            f = Float.parseFloat(boost.image);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null) {
                q = new BoostQueryNode(q, f);
            }
        } catch (Exception ignored) {
        /* Should this be handled somehow? (defaults to "no boost",
         * if boost number is invalid)
         */
        }
    }
    {
        if (true)
            return q;
    }
    throw new Error("Missing return statement in function");
}
Also used: AndQueryNode, BooleanQueryNode, BoostQueryNode, FieldQueryNode, FuzzyQueryNode, GroupQueryNode, ModifierQueryNode, OrQueryNode, QueryNode, QuotedFieldQueryNode, SlopQueryNode (org.apache.lucene.queryparser.flexible.core.nodes); RegexpQueryNode, TermRangeQueryNode (org.apache.lucene.queryparser.flexible.standard.nodes); QueryNodeParseException (org.apache.lucene.queryparser.flexible.core); MessageImpl (org.apache.lucene.queryparser.flexible.messages).
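
The Term production above is where a trailing ^boost on a bare term becomes a BoostQueryNode, a ~ suffix becomes a FuzzyQueryNode, a /regexp/ becomes a RegexpQueryNode, and a bracketed range becomes a TermRangeQueryNode. The following is a minimal sketch (not part of the source) of driving the generated parser directly; the field name "body" and the query strings are illustrative assumptions.

import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser;

public class TermProductionSketch {
    public static void main(String[] args) throws QueryNodeParseException {
        StandardSyntaxParser parser = new StandardSyntaxParser();
        // "lucene^2.5" should come back as a BoostQueryNode wrapping a FieldQueryNode
        QueryNode boosted = parser.parse("lucene^2.5", "body");
        System.out.println(boosted);
        // "lucene~1" should come back as a FuzzyQueryNode; the similarity is parsed
        // from the FUZZY_SLOP token, exactly as in the fuzzy branch above
        QueryNode fuzzy = parser.parse("lucene~1", "body");
        System.out.println(fuzzy);
    }
}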

Example 2 with BoostQueryNode

Use of org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode in the lucene-solr project (by apache), in the class StandardSyntaxParser, method Clause.

public final QueryNode Clause(CharSequence field) throws ParseException {
    QueryNode q;
    Token fieldToken = null, boost = null, operator = null, term = null;
    FieldQueryNode qLower, qUpper;
    boolean lowerInclusive, upperInclusive;
    boolean group = false;
    if (jj_2_2(3)) {
        fieldToken = jj_consume_token(TERM);
        switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
            case OP_COLON:
            case OP_EQUAL:
                switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                    case OP_COLON:
                        jj_consume_token(OP_COLON);
                        break;
                    case OP_EQUAL:
                        jj_consume_token(OP_EQUAL);
                        break;
                    default:
                        jj_la1[5] = jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.discardEscapeChar(fieldToken.image);
                q = Term(field);
                break;
            case OP_LESSTHAN:
            case OP_LESSTHANEQ:
            case OP_MORETHAN:
            case OP_MORETHANEQ:
                switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                    case OP_LESSTHAN:
                        operator = jj_consume_token(OP_LESSTHAN);
                        break;
                    case OP_LESSTHANEQ:
                        operator = jj_consume_token(OP_LESSTHANEQ);
                        break;
                    case OP_MORETHAN:
                        operator = jj_consume_token(OP_MORETHAN);
                        break;
                    case OP_MORETHANEQ:
                        operator = jj_consume_token(OP_MORETHANEQ);
                        break;
                    default:
                        jj_la1[6] = jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.discardEscapeChar(fieldToken.image);
                switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                    case TERM:
                        term = jj_consume_token(TERM);
                        break;
                    case QUOTED:
                        term = jj_consume_token(QUOTED);
                        break;
                    case NUMBER:
                        term = jj_consume_token(NUMBER);
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                if (term.kind == QUOTED) {
                    term.image = term.image.substring(1, term.image.length() - 1);
                }
                switch(operator.kind) {
                    case OP_LESSTHAN:
                        lowerInclusive = true;
                        upperInclusive = false;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case OP_LESSTHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case OP_MORETHAN:
                        lowerInclusive = false;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    case OP_MORETHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    default:
                        {
                            if (true)
                                throw new Error("Unhandled case: operator=" + operator.toString());
                        }
                }
                q = new TermRangeQueryNode(qLower, qUpper, lowerInclusive, upperInclusive);
                break;
            default:
                jj_la1[8] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
        }
    } else {
        switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
            case LPAREN:
            case QUOTED:
            case TERM:
            case REGEXPTERM:
            case RANGEIN_START:
            case RANGEEX_START:
            case NUMBER:
                if (jj_2_1(2)) {
                    fieldToken = jj_consume_token(TERM);
                    switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                        case OP_COLON:
                            jj_consume_token(OP_COLON);
                            break;
                        case OP_EQUAL:
                            jj_consume_token(OP_EQUAL);
                            break;
                        default:
                            jj_la1[9] = jj_gen;
                            jj_consume_token(-1);
                            throw new ParseException();
                    }
                    field = EscapeQuerySyntaxImpl.discardEscapeChar(fieldToken.image);
                } else {
                    ;
                }
                switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                    case QUOTED:
                    case TERM:
                    case REGEXPTERM:
                    case RANGEIN_START:
                    case RANGEEX_START:
                    case NUMBER:
                        q = Term(field);
                        break;
                    case LPAREN:
                        jj_consume_token(LPAREN);
                        q = Query(field);
                        jj_consume_token(RPAREN);
                        switch((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                            case CARAT:
                                jj_consume_token(CARAT);
                                boost = jj_consume_token(NUMBER);
                                break;
                            default:
                                jj_la1[10] = jj_gen;
                                ;
                        }
                        group = true;
                        break;
                    default:
                        jj_la1[11] = jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[12] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
        }
    }
    if (boost != null) {
        float f = (float) 1.0;
        try {
            f = Float.parseFloat(boost.image);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null) {
                q = new BoostQueryNode(q, f);
            }
        } catch (Exception ignored) {
        /* Should this be handled somehow? (defaults to "no boost",
         * if boost number is invalid)
         */
        }
    }
    if (group) {
        q = new GroupQueryNode(q);
    }
    {
        if (true)
            return q;
    }
    throw new Error("Missing return statement in function");
}
Also used: AndQueryNode, BooleanQueryNode, BoostQueryNode, FieldQueryNode, FuzzyQueryNode, GroupQueryNode, ModifierQueryNode, OrQueryNode, QueryNode, QuotedFieldQueryNode, SlopQueryNode (org.apache.lucene.queryparser.flexible.core.nodes); RegexpQueryNode, TermRangeQueryNode (org.apache.lucene.queryparser.flexible.standard.nodes); QueryNodeParseException (org.apache.lucene.queryparser.flexible.core).
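
The Clause production above covers three shapes: field:term (or field=term) delegating to Term, a relational comparison such as field>=value that becomes a one-sided TermRangeQueryNode with "*" on the open end, and a parenthesized sub-query whose optional ^boost is applied before the whole thing is wrapped in a GroupQueryNode. A minimal sketch (not part of the source); the query strings and the field name are illustrative assumptions.

import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser;

public class ClauseProductionSketch {
    public static void main(String[] args) throws QueryNodeParseException {
        StandardSyntaxParser parser = new StandardSyntaxParser();
        // "(apache lucene)^2": the inner query gets a BoostQueryNode, which is then
        // wrapped in a GroupQueryNode (group == true in the production above)
        System.out.println(parser.parse("(apache lucene)^2", "body"));
        // "year>=2010": OP_MORETHANEQ should build a TermRangeQueryNode from "2010"
        // up to the open upper bound "*", with both ends inclusive
        System.out.println(parser.parse("year>=2010", "body"));
    }
}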

Example 3 with BoostQueryNode

Use of org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode in the lucene-solr project (by apache), in the class BoostQueryNodeBuilder, method build.

@Override
public Query build(QueryNode queryNode) throws QueryNodeException {
    BoostQueryNode boostNode = (BoostQueryNode) queryNode;
    QueryNode child = boostNode.getChild();
    if (child == null) {
        return null;
    }
    Query query = (Query) child.getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
    return new BoostQuery(query, boostNode.getValue());
}
Also used: BoostQueryNode, QueryNode (org.apache.lucene.queryparser.flexible.core.nodes); BoostQuery, Query (org.apache.lucene.search).
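
During query building, the query tree builder dispatches each BoostQueryNode to this builder; the child Query has already been built and stored under QUERY_TREE_BUILDER_TAGID, so the builder only wraps it in a BoostQuery. A minimal end-to-end sketch (not part of the source); the analyzer and the "title" field are illustrative assumptions.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;

public class BoostBuildSketch {
    public static void main(String[] args) throws QueryNodeException {
        StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());
        Query q = parser.parse("lucene^3", "title");
        // The BoostQueryNode produced by the syntax parser should surface here
        // as a BoostQuery around the built term query.
        if (q instanceof BoostQuery) {
            System.out.println("boost = " + ((BoostQuery) q).getBoost());
        }
        System.out.println(q);
    }
}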

Example 4 with BoostQueryNode

Use of org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode in the lucene-solr project (by apache), in the class BoostQueryNodeProcessor, method postProcessNode.

@Override
protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
    if (node instanceof FieldableNode && (node.getParent() == null || !(node.getParent() instanceof FieldableNode))) {
        FieldableNode fieldNode = (FieldableNode) node;
        QueryConfigHandler config = getQueryConfigHandler();
        if (config != null) {
            CharSequence field = fieldNode.getField();
            FieldConfig fieldConfig = config.getFieldConfig(StringUtils.toString(field));
            if (fieldConfig != null) {
                Float boost = fieldConfig.get(ConfigurationKeys.BOOST);
                if (boost != null) {
                    return new BoostQueryNode(node, boost);
                }
            }
        }
    }
    return node;
}
Also used: BoostQueryNode, FieldableNode (org.apache.lucene.queryparser.flexible.core.nodes); FieldConfig, QueryConfigHandler (org.apache.lucene.queryparser.flexible.core.config).
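
BoostQueryNodeProcessor only wraps a node when the FieldConfig of its field carries a value under ConfigurationKeys.BOOST. With StandardQueryParser that configuration is normally populated via setFieldsBoost(...). A minimal sketch (not part of the source); the field names and weights are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.Query;

public class FieldBoostSketch {
    public static void main(String[] args) throws QueryNodeException {
        StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());
        Map<String, Float> boosts = new HashMap<>();
        boosts.put("title", 4.0f);
        boosts.put("body", 1.0f);
        // The per-field weights are stored in the query configuration; a field-config
        // listener copies each weight into the matching FieldConfig under
        // ConfigurationKeys.BOOST, which is what postProcessNode() reads above.
        parser.setFieldsBoost(boosts);
        Query q = parser.parse("lucene", "title");
        System.out.println(q); // expected: the title term query wrapped with boost 4.0
    }
}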

Example 5 with BoostQueryNode

Use of org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode in the lucene-skos project (by behas), in the class SKOSQueryNodeProcessor, method postProcessNode.

@Override
protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
    if (node instanceof TextableQueryNode && !(node instanceof WildcardQueryNode) && !(node instanceof FuzzyQueryNode) && !(node instanceof RegexpQueryNode) && !(node.getParent() instanceof RangeQueryNode)) {
        FieldQueryNode fieldNode = ((FieldQueryNode) node);
        String text = fieldNode.getTextAsString();
        String field = fieldNode.getFieldAsString();
        CachingTokenFilter buffer = null;
        PositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;
        int positionCount = 0;
        boolean severalTokensAtSamePosition = false;
        try {
            try (TokenStream source = this.analyzer.tokenStream(field, text)) {
                buffer = new CachingTokenFilter(source);
                buffer.reset();
                if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
                    posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
                }
                try {
                    while (buffer.incrementToken()) {
                        numTokens++;
                        int positionIncrement = (posIncrAtt != null) ? posIncrAtt.getPositionIncrement() : 1;
                        if (positionIncrement != 0) {
                            positionCount += positionIncrement;
                        } else {
                            severalTokensAtSamePosition = true;
                        }
                    }
                } catch (IOException e) {
                // ignore
                }
                // rewind the buffer stream
                // will never throw on subsequent reset calls
                buffer.reset();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            if (!buffer.hasAttribute(CharTermAttribute.class)) {
                return new NoTokenFoundQueryNode();
            }
            CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class);
            if (numTokens == 0) {
                return new NoTokenFoundQueryNode();
            } else if (numTokens == 1) {
                String term = null;
                try {
                    boolean hasNext;
                    hasNext = buffer.incrementToken();
                    assert hasNext == true;
                    term = termAtt.toString();
                } catch (IOException e) {
                // safe to ignore, because we know the number of tokens
                }
                fieldNode.setText(term);
                return fieldNode;
            } else if (severalTokensAtSamePosition || !(node instanceof QuotedFieldQueryNode)) {
                if (positionCount == 1 || !(node instanceof QuotedFieldQueryNode)) {
                    if (positionCount == 1) {
                        // simple case: only one position, with synonyms
                        LinkedList<QueryNode> children = new LinkedList<>();
                        for (int i = 0; i < numTokens; i++) {
                            String term = null;
                            try {
                                boolean hasNext = buffer.incrementToken();
                                assert hasNext == true;
                                term = termAtt.toString();
                            } catch (IOException e) {
                            // safe to ignore, because we know the number of tokens
                            }
                            if (buffer.hasAttribute(SKOSTypeAttribute.class) && boosts != null) {
                                SKOSTypeAttribute skosAttr = buffer.getAttribute(SKOSTypeAttribute.class);
                                children.add(new BoostQueryNode(new FieldQueryNode(field, term, -1, -1), getBoost(skosAttr.getSkosType())));
                            } else {
                                children.add(new FieldQueryNode(field, term, -1, -1));
                            }
                        }
                        return new GroupQueryNode(new StandardBooleanQueryNode(children, positionCount == 1));
                    } else {
                        // multiple positions
                        QueryNode q = new StandardBooleanQueryNode(Collections.<QueryNode>emptyList(), false);
                        QueryNode currentQuery = null;
                        for (int i = 0; i < numTokens; i++) {
                            String term = null;
                            try {
                                boolean hasNext = buffer.incrementToken();
                                assert hasNext == true;
                                term = termAtt.toString();
                            } catch (IOException e) {
                            // safe to ignore, because we know the number of tokens
                            }
                            if (posIncrAtt != null && posIncrAtt.getPositionIncrement() == 0) {
                                if (!(currentQuery instanceof BooleanQueryNode)) {
                                    QueryNode t = currentQuery;
                                    currentQuery = new StandardBooleanQueryNode(Collections.<QueryNode>emptyList(), true);
                                    ((BooleanQueryNode) currentQuery).add(t);
                                }
                                ((BooleanQueryNode) currentQuery).add(new FieldQueryNode(field, term, -1, -1));
                            } else {
                                if (currentQuery != null) {
                                    if (this.defaultOperator == Operator.OR) {
                                        q.add(currentQuery);
                                    } else {
                                        q.add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                                    }
                                }
                                currentQuery = new FieldQueryNode(field, term, -1, -1);
                            }
                        }
                        if (this.defaultOperator == Operator.OR) {
                            q.add(currentQuery);
                        } else {
                            q.add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                        }
                        if (q instanceof BooleanQueryNode) {
                            q = new GroupQueryNode(q);
                        }
                        return q;
                    }
                } else {
                    // phrase query:
                    MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();
                    List<FieldQueryNode> multiTerms = new ArrayList<>();
                    int position = -1;
                    int i = 0;
                    int termGroupCount = 0;
                    for (; i < numTokens; i++) {
                        String term = null;
                        int positionIncrement = 1;
                        try {
                            boolean hasNext = buffer.incrementToken();
                            assert hasNext == true;
                            term = termAtt.toString();
                            if (posIncrAtt != null) {
                                positionIncrement = posIncrAtt.getPositionIncrement();
                            }
                        } catch (IOException e) {
                        // safe to ignore, because we know the number of tokens
                        }
                        if (positionIncrement > 0 && multiTerms.size() > 0) {
                            for (FieldQueryNode termNode : multiTerms) {
                                if (this.positionIncrementsEnabled) {
                                    termNode.setPositionIncrement(position);
                                } else {
                                    termNode.setPositionIncrement(termGroupCount);
                                }
                                mpq.add(termNode);
                            }
                            // Only increment once for each "group" of
                            // terms that were in the same position:
                            termGroupCount++;
                            multiTerms.clear();
                        }
                        position += positionIncrement;
                        multiTerms.add(new FieldQueryNode(field, term, -1, -1));
                    }
                    for (FieldQueryNode termNode : multiTerms) {
                        if (this.positionIncrementsEnabled) {
                            termNode.setPositionIncrement(position);
                        } else {
                            termNode.setPositionIncrement(termGroupCount);
                        }
                        mpq.add(termNode);
                    }
                    return mpq;
                }
            } else {
                TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();
                int position = -1;
                for (int i = 0; i < numTokens; i++) {
                    String term = null;
                    int positionIncrement = 1;
                    try {
                        boolean hasNext = buffer.incrementToken();
                        assert hasNext == true;
                        term = termAtt.toString();
                        if (posIncrAtt != null) {
                            positionIncrement = posIncrAtt.getPositionIncrement();
                        }
                    } catch (IOException e) {
                    // safe to ignore, because we know the number of tokens
                    }
                    FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);
                    if (this.positionIncrementsEnabled) {
                        position += positionIncrement;
                        newFieldNode.setPositionIncrement(position);
                    } else {
                        newFieldNode.setPositionIncrement(i);
                    }
                    pq.add(newFieldNode);
                }
                return pq;
            }
        } finally {
            if (buffer != null) {
                try {
                    buffer.close();
                } catch (IOException e) {
                // safe to ignore
                }
            }
        }
    }
    return node;
}
Also used: BooleanQueryNode, BoostQueryNode, FieldQueryNode, FuzzyQueryNode, GroupQueryNode, ModifierQueryNode, NoTokenFoundQueryNode, QueryNode, QuotedFieldQueryNode, RangeQueryNode, TextableQueryNode, TokenizedPhraseQueryNode (org.apache.lucene.queryparser.flexible.core.nodes); MultiPhraseQueryNode, RegexpQueryNode, StandardBooleanQueryNode, WildcardQueryNode (org.apache.lucene.queryparser.flexible.standard.nodes); CachingTokenFilter, TokenStream (org.apache.lucene.analysis); CharTermAttribute, PositionIncrementAttribute (org.apache.lucene.analysis.tokenattributes); SKOSTypeAttribute (at.ac.univie.mminf.luceneSKOS.analysis); IOException (java.io); ArrayList, LinkedList (java.util).
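
When SKOS expansion produces several tokens at the same position, the processor above emits one FieldQueryNode per token and, if the token carries a SKOSTypeAttribute, wraps it in a BoostQueryNode weighted by the SKOS relation type. The following condensed, hand-written sketch only illustrates that node construction; expand(), getBoost() and the weights are hypothetical stand-ins, not the lucene-skos API.

import java.util.LinkedList;

import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
import org.apache.lucene.queryparser.flexible.standard.nodes.StandardBooleanQueryNode;

public class SkosBoostSketch {

    // Hypothetical weighting: the original term keeps full weight, expanded labels get less.
    static float getBoost(boolean isOriginalTerm) {
        return isOriginalTerm ? 1.0f : 0.5f;
    }

    // Hypothetical expansion: one node per term, with expanded terms boosted, mirroring
    // the single-position branch of postProcessNode() above, then grouped.
    static QueryNode expand(String field, String originalTerm, String[] synonyms) {
        LinkedList<QueryNode> children = new LinkedList<>();
        children.add(new FieldQueryNode(field, originalTerm, -1, -1));
        for (String synonym : synonyms) {
            children.add(new BoostQueryNode(new FieldQueryNode(field, synonym, -1, -1),
                    getBoost(false)));
        }
        // the boolean flag mirrors the "positionCount == 1" argument used above
        return new GroupQueryNode(new StandardBooleanQueryNode(children, true));
    }

    public static void main(String[] args) {
        System.out.println(expand("subject", "jupiter", new String[] { "jove" }));
    }
}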

Aggregations

BoostQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode): 5 usages
QueryNode (org.apache.lucene.queryparser.flexible.core.nodes.QueryNode): 4 usages
BooleanQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode): 3 usages
FieldQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode): 3 usages
FuzzyQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode): 3 usages
GroupQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode): 3 usages
ModifierQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode): 3 usages
QuotedFieldQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode): 3 usages
RegexpQueryNode (org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode): 3 usages
QueryNodeParseException (org.apache.lucene.queryparser.flexible.core.QueryNodeParseException): 2 usages
AndQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.AndQueryNode): 2 usages
OrQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.OrQueryNode): 2 usages
SlopQueryNode (org.apache.lucene.queryparser.flexible.core.nodes.SlopQueryNode): 2 usages
TermRangeQueryNode (org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode): 2 usages
SKOSTypeAttribute (at.ac.univie.mminf.luceneSKOS.analysis.SKOSTypeAttribute): 1 usage
IOException (java.io.IOException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
LinkedList (java.util.LinkedList): 1 usage
CachingTokenFilter (org.apache.lucene.analysis.CachingTokenFilter): 1 usage
TokenStream (org.apache.lucene.analysis.TokenStream): 1 usage