
Example 6 with SuppressWarnings

Use of edu.umd.cs.findbugs.annotations.SuppressWarnings in project SearchServices by Alfresco.

From the class Solr4QueryParser, method getPrefixQuery.

@SuppressWarnings("deprecation")
public Query getPrefixQuery(String field, String termStr, AnalysisMode analysisMode) throws ParseException {
    if (field.equals(FIELD_PATH)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_PATH);
    } else if (field.equals(FIELD_PATHWITHREPEATS)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_PATHWITHREPEATS);
    } else if (field.equals(FIELD_TEXT)) {
        return createDefaultTextQuery(textField -> getPrefixQuery(textField, termStr, analysisMode));
    } else if (field.equals(FIELD_ID)) {
        boolean lowercaseExpandedTerms = getLowercaseExpandedTerms();
        try {
            setLowercaseExpandedTerms(false);
            return super.getPrefixQuery(FIELD_LID, termStr);
        } finally {
            setLowercaseExpandedTerms(lowercaseExpandedTerms);
        }
    } else if (field.equals(FIELD_DBID) || field.equals(FIELD_ISROOT) || field.equals(FIELD_ISCONTAINER) || field.equals(FIELD_ISNODE) || field.equals(FIELD_TX) || field.equals(FIELD_PARENT) || field.equals(FIELD_PRIMARYPARENT) || field.equals(FIELD_QNAME) || field.equals(FIELD_PRIMARYASSOCTYPEQNAME) || field.equals(FIELD_ASSOCTYPEQNAME)) {
        boolean lowercaseExpandedTerms = getLowercaseExpandedTerms();
        try {
            setLowercaseExpandedTerms(false);
            return super.getPrefixQuery(field, termStr);
        } finally {
            setLowercaseExpandedTerms(lowercaseExpandedTerms);
        }
    } else if (field.equals(FIELD_CLASS)) {
        return super.getPrefixQuery(field, termStr);
    // throw new UnsupportedOperationException("Prefix Queries are not
    // support for "+FIELD_CLASS);
    } else if (field.equals(FIELD_TYPE)) {
        return super.getPrefixQuery(field, termStr);
    // throw new UnsupportedOperationException("Prefix Queries are not
    // support for "+FIELD_TYPE);
    } else if (field.equals(FIELD_EXACTTYPE)) {
        return super.getPrefixQuery(field, termStr);
    // throw new UnsupportedOperationException("Prefix Queries are not
    // support for "+FIELD_EXACTTYPE);
    } else if (field.equals(FIELD_ASPECT)) {
        return super.getPrefixQuery(field, termStr);
    // throw new UnsupportedOperationException("Prefix Queries are not
    // support for "+FIELD_ASPECT);
    } else if (field.equals(FIELD_EXACTASPECT)) {
        return super.getPrefixQuery(field, termStr);
    // throw new UnsupportedOperationException("Prefix Queries are not
    // support for "+FIELD_EXACTASPECT);
    } else if (isPropertyField(field)) {
        return attributeQueryBuilder(field, termStr, new PrefixQuery(), analysisMode, LuceneFunction.FIELD);
    } else if (field.equals(FIELD_ALL)) {
        Set<String> all = searchParameters.getAllAttributes();
        if ((all == null) || (all.size() == 0)) {
            Collection<QName> contentAttributes = dictionaryService.getAllProperties(null);
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            for (QName qname : contentAttributes) {
                // The super implementation will create phrase queries etc
                // if required
                Query part = getPrefixQuery(PROPERTY_FIELD_PREFIX + qname.toString(), termStr, analysisMode);
                if (part != null) {
                    query.add(part, Occur.SHOULD);
                } else {
                    query.add(createNoMatchQuery(), Occur.SHOULD);
                }
            }
            return query.build();
        } else {
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            for (String fieldName : all) {
                Query part = getPrefixQuery(fieldName, termStr, analysisMode);
                if (part != null) {
                    query.add(part, Occur.SHOULD);
                } else {
                    query.add(createNoMatchQuery(), Occur.SHOULD);
                }
            }
            return query.build();
        }
    } else if (field.equals(FIELD_ISUNSET)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_ISUNSET);
    } else if (field.equals(FIELD_ISNULL)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_ISNULL);
    } else if (field.equals(FIELD_ISNOTNULL)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_ISNOTNULL);
    } else if (field.equals(FIELD_EXISTS)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_EXISTS);
    } else if (QueryParserUtils.matchDataTypeDefinition(searchParameters.getNamespace(), namespacePrefixResolver, dictionaryService, field) != null) {
        Collection<QName> contentAttributes = dictionaryService.getAllProperties(QueryParserUtils.matchDataTypeDefinition(searchParameters.getNamespace(), namespacePrefixResolver, dictionaryService, field).getName());
        BooleanQuery.Builder query = new BooleanQuery.Builder();
        for (QName qname : contentAttributes) {
            // The super implementation will create phrase queries etc if
            // required
            Query part = getPrefixQuery(PROPERTY_FIELD_PREFIX + qname.toString(), termStr, analysisMode);
            if (part != null) {
                query.add(part, Occur.SHOULD);
            } else {
                query.add(createNoMatchQuery(), Occur.SHOULD);
            }
        }
        return query.build();
    } else if (field.equals(FIELD_FTSSTATUS)) {
        throw new UnsupportedOperationException("Prefix Queries are not support for " + FIELD_FTSSTATUS);
    } else if (field.equals(FIELD_TAG)) {
        return super.getPrefixQuery(field, termStr);
    } else if (field.equals(FIELD_SITE)) {
        return super.getPrefixQuery(field, termStr);
    } else if (field.equals(FIELD_NPATH)) {
        return super.getPrefixQuery(field, termStr);
    } else if (field.equals(FIELD_PNAME)) {
        return super.getPrefixQuery(field, termStr);
    } else {
        return super.getPrefixQuery(field, termStr);
    }
}
Also used : BooleanQuery(org.apache.lucene.search.BooleanQuery) Set(java.util.Set) OrderedHashSet(org.antlr.misc.OrderedHashSet) HashSet(java.util.HashSet) Query(org.apache.lucene.search.Query) RegexpQuery(org.apache.lucene.search.RegexpQuery) LegacyNumericRangeQuery(org.apache.lucene.search.LegacyNumericRangeQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) ConstantScoreQuery(org.apache.lucene.search.ConstantScoreQuery) SpanNearQuery(org.apache.lucene.search.spans.SpanNearQuery) SpanOrQuery(org.apache.lucene.search.spans.SpanOrQuery) MultiTermQuery(org.apache.lucene.search.MultiTermQuery) SpanTermQuery(org.apache.lucene.search.spans.SpanTermQuery) SpanQuery(org.apache.lucene.search.spans.SpanQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) QName(org.alfresco.service.namespace.QName) Builder(org.apache.lucene.search.BooleanQuery.Builder) Collection(java.util.Collection) Builder(org.apache.lucene.search.BooleanQuery.Builder) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings)
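
Note that this is the FindBugs/SpotBugs annotation rather than java.lang.SuppressWarnings: it is retained in the compiled class file so the analyzer can read it, it is aimed at the analyzer's bug patterns rather than javac warnings, and it offers an optional justification element (newer versions of the annotations library deprecate it in favour of SuppressFBWarnings). The following minimal sketch contrasts the two forms; the class, method, and pattern names are illustrative and not taken from Solr4QueryParser.

import edu.umd.cs.findbugs.annotations.SuppressWarnings;

public class SuppressionSketch {

    // Compiler-level suppression: the value names a javac warning category.
    @java.lang.SuppressWarnings("deprecation")
    public void callsDeprecatedApi() {
        // ... invoke a deprecated API here ...
    }

    // Analyzer-level suppression: the value names a FindBugs/SpotBugs bug pattern,
    // and justification documents why the finding is accepted.
    @SuppressWarnings(value = "DM_DEFAULT_ENCODING",
            justification = "The legacy file format fixes the encoding")
    public byte[] toBytes(String text) {
        return text.getBytes(); // relies on the platform default charset on purpose
    }
}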

Example 7 with SuppressWarnings

Use of edu.umd.cs.findbugs.annotations.SuppressWarnings in project SearchServices by Alfresco.

From the class Solr4QueryParser, method getWildcardQuery.

@SuppressWarnings("deprecation")
public Query getWildcardQuery(String field, String termStr, AnalysisMode analysisMode) throws ParseException {
    if (field.equals(FIELD_PATH)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_PATH);
    } else if (field.equals(FIELD_PATHWITHREPEATS)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_PATHWITHREPEATS);
    } else if (field.equals(FIELD_TEXT)) {
        return createDefaultTextQuery(textField -> getWildcardQuery(textField, termStr, analysisMode));
    } else if (field.equals(FIELD_ID)) {
        boolean lowercaseExpandedTerms = getLowercaseExpandedTerms();
        try {
            setLowercaseExpandedTerms(false);
            return super.getWildcardQuery(FIELD_LID, termStr);
        } finally {
            setLowercaseExpandedTerms(lowercaseExpandedTerms);
        }
    } else if (field.equals(FIELD_DBID) || field.equals(FIELD_ISROOT) || field.equals(FIELD_ISCONTAINER) || field.equals(FIELD_ISNODE) || field.equals(FIELD_TX) || field.equals(FIELD_PARENT) || field.equals(FIELD_PRIMARYPARENT) || field.equals(FIELD_QNAME) || field.equals(FIELD_PRIMARYASSOCTYPEQNAME) || field.equals(FIELD_ASSOCTYPEQNAME)) {
        boolean lowercaseExpandedTerms = getLowercaseExpandedTerms();
        try {
            setLowercaseExpandedTerms(false);
            return super.getWildcardQuery(field, termStr);
        } finally {
            setLowercaseExpandedTerms(lowercaseExpandedTerms);
        }
    } else if (field.equals(FIELD_CLASS)) {
        return super.getWildcardQuery(field, termStr);
    // throw new UnsupportedOperationException("Wildcard Queries are not
    // support for "+FIELD_CLASS);
    } else if (field.equals(FIELD_TYPE)) {
        return super.getWildcardQuery(field, termStr);
    // throw new UnsupportedOperationException("Wildcard Queries are not
    // support for "+FIELD_TYPE);
    } else if (field.equals(FIELD_EXACTTYPE)) {
        return super.getWildcardQuery(field, termStr);
    // throw new UnsupportedOperationException("Wildcard Queries are not
    // support for "+FIELD_EXACTTYPE);
    } else if (field.equals(FIELD_ASPECT)) {
        return super.getWildcardQuery(field, termStr);
    // throw new UnsupportedOperationException("Wildcard Queries are not
    // support for "+FIELD_ASPECT);
    } else if (field.equals(FIELD_EXACTASPECT)) {
        return super.getWildcardQuery(field, termStr);
    // throw new UnsupportedOperationException("Wildcard Queries are not
    // support for "+FIELD_EXACTASPECT);
    } else if (isPropertyField(field)) {
        return attributeQueryBuilder(field, termStr, new WildcardQuery(), analysisMode, LuceneFunction.FIELD);
    } else if (field.equals(FIELD_ALL)) {
        Set<String> all = searchParameters.getAllAttributes();
        if ((all == null) || (all.size() == 0)) {
            Collection<QName> contentAttributes = dictionaryService.getAllProperties(null);
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            for (QName qname : contentAttributes) {
                // The super implementation will create phrase queries etc
                // if required
                Query part = getWildcardQuery(PROPERTY_FIELD_PREFIX + qname.toString(), termStr, analysisMode);
                if (part != null) {
                    query.add(part, Occur.SHOULD);
                } else {
                    query.add(createNoMatchQuery(), Occur.SHOULD);
                }
            }
            return query.build();
        } else {
            BooleanQuery.Builder query = new BooleanQuery.Builder();
            for (String fieldName : all) {
                Query part = getWildcardQuery(fieldName, termStr, analysisMode);
                if (part != null) {
                    query.add(part, Occur.SHOULD);
                } else {
                    query.add(createNoMatchQuery(), Occur.SHOULD);
                }
            }
            return query.build();
        }
    } else if (field.equals(FIELD_ISUNSET)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_ISUNSET);
    } else if (field.equals(FIELD_ISNULL)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_ISNULL);
    } else if (field.equals(FIELD_ISNOTNULL)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_ISNOTNULL);
    } else if (field.equals(FIELD_EXISTS)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_EXISTS);
    } else if (QueryParserUtils.matchDataTypeDefinition(searchParameters.getNamespace(), namespacePrefixResolver, dictionaryService, field) != null) {
        Collection<QName> contentAttributes = dictionaryService.getAllProperties(QueryParserUtils.matchDataTypeDefinition(searchParameters.getNamespace(), namespacePrefixResolver, dictionaryService, field).getName());
        BooleanQuery.Builder query = new BooleanQuery.Builder();
        for (QName qname : contentAttributes) {
            // The super implementation will create phrase queries etc if
            // required
            Query part = getWildcardQuery(PROPERTY_FIELD_PREFIX + qname.toString(), termStr, analysisMode);
            if (part != null) {
                query.add(part, Occur.SHOULD);
            } else {
                query.add(createNoMatchQuery(), Occur.SHOULD);
            }
        }
        return query.build();
    } else if (field.equals(FIELD_FTSSTATUS)) {
        throw new UnsupportedOperationException("Wildcard Queries are not support for " + FIELD_FTSSTATUS);
    } else if (field.equals(FIELD_TAG)) {
        return super.getWildcardQuery(field, termStr);
    } else if (field.equals(FIELD_SITE)) {
        return super.getWildcardQuery(field, termStr);
    } else if (field.equals(FIELD_PNAME)) {
        return super.getWildcardQuery(field, termStr);
    } else if (field.equals(FIELD_NPATH)) {
        return super.getWildcardQuery(field, termStr);
    } else {
        return super.getWildcardQuery(field, termStr);
    }
}
Also used : BooleanQuery(org.apache.lucene.search.BooleanQuery) Set(java.util.Set) OrderedHashSet(org.antlr.misc.OrderedHashSet) HashSet(java.util.HashSet) Query(org.apache.lucene.search.Query) RegexpQuery(org.apache.lucene.search.RegexpQuery) LegacyNumericRangeQuery(org.apache.lucene.search.LegacyNumericRangeQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) ConstantScoreQuery(org.apache.lucene.search.ConstantScoreQuery) SpanNearQuery(org.apache.lucene.search.spans.SpanNearQuery) SpanOrQuery(org.apache.lucene.search.spans.SpanOrQuery) MultiTermQuery(org.apache.lucene.search.MultiTermQuery) SpanTermQuery(org.apache.lucene.search.spans.SpanTermQuery) SpanQuery(org.apache.lucene.search.spans.SpanQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) QName(org.alfresco.service.namespace.QName) Builder(org.apache.lucene.search.BooleanQuery.Builder) Collection(java.util.Collection) Builder(org.apache.lucene.search.BooleanQuery.Builder) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings)

Example 8 with SuppressWarnings

Use of edu.umd.cs.findbugs.annotations.SuppressWarnings in project SearchServices by Alfresco.

From the class TempFileWarningLogger, method checkFiles.

// Avoid FindBugs false positive (https://github.com/spotbugs/spotbugs/issues/756)
@SuppressWarnings("RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE")
public boolean checkFiles() {
    if (log.isDebugEnabled()) {
        log.debug("Looking for temp files matching " + glob + " in directory " + dir);
    }
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, glob)) {
        for (Path file : stream) {
            if (log.isDebugEnabled()) {
                log.debug("Solr suggester temp file found matching file pattern: " + glob + ", path: " + file);
                log.debug("Removing suggester temp files.");
            }
            return true;
        }
        return false;
    } catch (IOException e) {
        throw new RuntimeException("Unable to create directory stream", e);
    }
}
Also used : Path(java.nio.file.Path) IOException(java.io.IOException) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings)
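
The suppressed pattern here stems from how javac compiles try-with-resources: the generated cleanup code null-checks the resource before closing it, and some SpotBugs versions report that synthetic check as RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE (the false positive tracked in the linked issue). A minimal sketch of the shape that triggers it, using illustrative names rather than the TempFileWarningLogger fields:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

import edu.umd.cs.findbugs.annotations.SuppressWarnings;

public class TempFileScanSketch {

    // The try-with-resources block compiles to bytecode that null-checks "stream"
    // before calling close(); the suppression covers the resulting false positive.
    @SuppressWarnings("RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE")
    public boolean hasMatchingTempFiles(Path dir, String glob) {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, glob)) {
            return stream.iterator().hasNext();
        } catch (IOException e) {
            throw new UncheckedIOException("Unable to create directory stream", e);
        }
    }
}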

Example 9 with SuppressWarnings

Use of edu.umd.cs.findbugs.annotations.SuppressWarnings in project incubator-gobblin by Apache.

From the class Fork, method consumeRecordStream.

@SuppressWarnings(value = "RV_RETURN_VALUE_IGNORED", justification = "We actually don't care about the return value of subscribe.")
public void consumeRecordStream(RecordStreamWithMetadata<D, S> stream) throws RecordStreamProcessor.StreamProcessingException {
    if (this.converter instanceof MultiConverter) {
        // if multiconverter, unpack it
        for (Converter cverter : ((MultiConverter) this.converter).getConverters()) {
            stream = cverter.processStream(stream, this.taskState);
        }
    } else {
        stream = this.converter.processStream(stream, this.taskState);
    }
    stream = this.rowLevelPolicyChecker.processStream(stream, this.taskState);
    stream = stream.mapStream(s -> s.map(r -> {
        onEachRecord();
        return r;
    }));
    stream = stream.mapStream(s -> s.doOnSubscribe(subscription -> onStart()));
    stream = stream.mapStream(s -> s.doOnComplete(() -> verifyAndSetForkState(ForkState.RUNNING, ForkState.SUCCEEDED)));
    stream = stream.mapStream(s -> s.doOnCancel(() -> {
        // Errors don't propagate up from below the fork, but cancel the stream, so use the failed state to indicate that
        // the fork failed to complete, which will then fail the task.
        verifyAndSetForkState(ForkState.RUNNING, ForkState.FAILED);
    }));
    stream = stream.mapStream(s -> s.doOnError(exc -> {
        verifyAndSetForkState(ForkState.RUNNING, ForkState.FAILED);
        this.logger.error(String.format("Fork %d of task %s failed to process data records", this.index, this.taskId), exc);
    }));
    stream = stream.mapStream(s -> s.doFinally(this::cleanup));
    stream.getRecordStream().subscribe(r -> {
        if (r instanceof RecordEnvelope) {
            this.writer.get().writeEnvelope((RecordEnvelope) r);
        } else if (r instanceof ControlMessage) {
            // This is to avoid missing an ack/nack in the error path.
            try {
                this.writer.get().getMessageHandler().handleMessage((ControlMessage) r);
            } catch (Throwable error) {
                r.nack(error);
                throw error;
            }
            r.ack();
        }
    }, e -> {
        // Handle writer close in error case since onComplete will not call when exception happens
        if (this.writer.isPresent()) {
            this.writer.get().close();
        }
        logger.error("Failed to process record.", e);
        verifyAndSetForkState(ForkState.RUNNING, ForkState.FAILED);
    }, () -> {
        if (this.writer.isPresent()) {
            this.writer.get().close();
        }
    });
}
Also used : ForkOperatorUtils(org.apache.gobblin.util.ForkOperatorUtils) SpeculativeAttemptAwareConstruct(org.apache.gobblin.commit.SpeculativeAttemptAwareConstruct) GobblinMetrics(org.apache.gobblin.metrics.GobblinMetrics) ExecutionModel(org.apache.gobblin.runtime.ExecutionModel) LoggerFactory(org.slf4j.LoggerFactory) ControlMessage(org.apache.gobblin.stream.ControlMessage) BoundedBlockingRecordQueue(org.apache.gobblin.runtime.BoundedBlockingRecordQueue) TaskPublisher(org.apache.gobblin.publisher.TaskPublisher) PartitionedDataWriter(org.apache.gobblin.writer.PartitionedDataWriter) AtomicReference(java.util.concurrent.atomic.AtomicReference) Task(org.apache.gobblin.runtime.Task) TaskState(org.apache.gobblin.runtime.TaskState) Closer(com.google.common.io.Closer) DataWriterBuilder(org.apache.gobblin.writer.DataWriterBuilder) Optional(com.google.common.base.Optional) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings) ExceptionCleanupUtils(org.apache.gobblin.runtime.util.ExceptionCleanupUtils) GobblinScopeTypes(org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes) ForkMetrics(org.apache.gobblin.runtime.util.ForkMetrics) RecordStreamProcessor(org.apache.gobblin.records.RecordStreamProcessor) TaskLevelPolicyCheckResults(org.apache.gobblin.qualitychecker.task.TaskLevelPolicyCheckResults) Logger(org.slf4j.Logger) TaskContext(org.apache.gobblin.runtime.TaskContext) Converter(org.apache.gobblin.converter.Converter) Instrumented(org.apache.gobblin.instrumented.Instrumented) State(org.apache.gobblin.configuration.State) RowLevelPolicyCheckResults(org.apache.gobblin.qualitychecker.row.RowLevelPolicyCheckResults) TaskExecutor(org.apache.gobblin.runtime.TaskExecutor) Throwables(com.google.common.base.Throwables) IOException(java.io.IOException) FinalState(org.apache.gobblin.util.FinalState) ConfigurationKeys(org.apache.gobblin.configuration.ConfigurationKeys) DataWriter(org.apache.gobblin.writer.DataWriter) RecordEnvelope(org.apache.gobblin.stream.RecordEnvelope) WatermarkAwareWriter(org.apache.gobblin.writer.WatermarkAwareWriter) DataWriterWrapperBuilder(org.apache.gobblin.writer.DataWriterWrapperBuilder) Destination(org.apache.gobblin.writer.Destination) Closeable(java.io.Closeable) RowLevelPolicyChecker(org.apache.gobblin.qualitychecker.row.RowLevelPolicyChecker) RecordStreamWithMetadata(org.apache.gobblin.records.RecordStreamWithMetadata) SharedResourcesBroker(org.apache.gobblin.broker.iface.SharedResourcesBroker) ForkThrowableHolder(org.apache.gobblin.runtime.ForkThrowableHolder) Preconditions(com.google.common.base.Preconditions) RecordStreamConsumer(org.apache.gobblin.records.RecordStreamConsumer) DataConversionException(org.apache.gobblin.converter.DataConversionException) Constructs(org.apache.gobblin.Constructs) MultiConverter(org.apache.gobblin.runtime.MultiConverter) ConstructState(org.apache.gobblin.state.ConstructState) MultiConverter(org.apache.gobblin.runtime.MultiConverter) RecordEnvelope(org.apache.gobblin.stream.RecordEnvelope) Converter(org.apache.gobblin.converter.Converter) MultiConverter(org.apache.gobblin.runtime.MultiConverter) ControlMessage(org.apache.gobblin.stream.ControlMessage) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings)
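
This example uses the two-element form of the annotation: value names the bug pattern (here, the ignored return value of subscribe) and justification records why the finding is acceptable, which is something java.lang.SuppressWarnings cannot express. Below is a minimal sketch of the same shape with a hypothetical fire-and-forget method; whether a given SpotBugs version reports exactly RV_RETURN_VALUE_IGNORED for this call depends on its detectors, so treat the pattern code as illustrative:

import java.util.concurrent.ExecutorService;

import edu.umd.cs.findbugs.annotations.SuppressWarnings;

public class FireAndForgetSketch {

    // The Future returned by submit() is deliberately discarded: the task is
    // fire-and-forget, so the analyzer finding is acknowledged rather than fixed.
    @SuppressWarnings(value = "RV_RETURN_VALUE_IGNORED",
            justification = "Fire-and-forget task; the returned Future is intentionally discarded")
    public void schedule(ExecutorService executor, Runnable task) {
        executor.submit(task);
    }
}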

Example 10 with SuppressWarnings

Use of edu.umd.cs.findbugs.annotations.SuppressWarnings in project st-js by st-js.

From the class NodeJSExecutor, method run.

/**
 * <p>run.</p>
 *
 * @param srcFile a {@link java.io.File} object.
 * @return a {@link org.stjs.generator.executor.ExecutionResult} object.
 */
@SuppressWarnings(value = "REC_CATCH_EXCEPTION")
public ExecutionResult run(File srcFile) {
    try {
        Process p = Runtime.getRuntime().exec(new String[] { NODE_JS, srcFile.getAbsolutePath() });
        int exitValue = p.waitFor();
        return new ExecutionResult(null, readStream(p.getInputStream()), readStream(p.getErrorStream()), exitValue);
    } catch (IOException e) {
        // TODO : this is not really going to be working on all OS!
        if (e.getMessage().contains("Cannot run program")) {
            String errMsg = "Please install node.js to use this feature https://github.com/joyent/node/wiki/Installation";
            throw new STJSRuntimeException(errMsg, e);
        }
        throw new STJSRuntimeException(e);
    } catch (InterruptedException e) {
        throw new STJSRuntimeException(e);
    }
}
Also used : STJSRuntimeException(org.stjs.generator.STJSRuntimeException) IOException(java.io.IOException) SuppressWarnings(edu.umd.cs.findbugs.annotations.SuppressWarnings)
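
REC_CATCH_EXCEPTION is the FindBugs pattern for a catch (Exception e) clause on a try block that declares no checked Exception, which also silently intercepts unexpected RuntimeExceptions. A minimal, hypothetical sketch of the code shape the pattern targets (the class, method, and values are illustrative, not taken from NodeJSExecutor):

import edu.umd.cs.findbugs.annotations.SuppressWarnings;

public class LenientParserSketch {

    // Only unchecked exceptions (e.g. NumberFormatException, NullPointerException)
    // can occur in the try block, so catching Exception would normally be flagged;
    // here it is intentional because any failure should fall back to the default.
    @SuppressWarnings(value = "REC_CATCH_EXCEPTION",
            justification = "Any parse failure should fall back to the default value")
    public int parseOrDefault(String raw, int defaultValue) {
        try {
            return Integer.parseInt(raw.trim());
        } catch (Exception e) {
            return defaultValue;
        }
    }
}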

Aggregations

SuppressWarnings (edu.umd.cs.findbugs.annotations.SuppressWarnings) 16
IOException (java.io.IOException) 8
BooleanQuery (org.apache.lucene.search.BooleanQuery) 4
Builder (org.apache.lucene.search.BooleanQuery.Builder) 4
ConstantScoreQuery (org.apache.lucene.search.ConstantScoreQuery) 4
LegacyNumericRangeQuery (org.apache.lucene.search.LegacyNumericRangeQuery) 4
MatchAllDocsQuery (org.apache.lucene.search.MatchAllDocsQuery) 4
MultiTermQuery (org.apache.lucene.search.MultiTermQuery) 4
Query (org.apache.lucene.search.Query) 4
RegexpQuery (org.apache.lucene.search.RegexpQuery) 4
TermQuery (org.apache.lucene.search.TermQuery) 4
TermRangeQuery (org.apache.lucene.search.TermRangeQuery) 4
SpanNearQuery (org.apache.lucene.search.spans.SpanNearQuery) 4
SpanOrQuery (org.apache.lucene.search.spans.SpanOrQuery) 4
SpanQuery (org.apache.lucene.search.spans.SpanQuery) 4
SpanTermQuery (org.apache.lucene.search.spans.SpanTermQuery) 4
Collection (java.util.Collection) 3
QName (org.alfresco.service.namespace.QName) 3
OrderedHashSet (org.antlr.misc.OrderedHashSet) 3
TokenStream (org.apache.lucene.analysis.TokenStream) 3