
Example 21 with MapperService

Use of org.elasticsearch.index.mapper.MapperService in project elasticsearch by elastic.

From the class TermVectorsService, the method getAnalyzerAtField:

private static Analyzer getAnalyzerAtField(IndexShard indexShard, String field, @Nullable Map<String, String> perFieldAnalyzer) {
    MapperService mapperService = indexShard.mapperService();
    Analyzer analyzer;
    if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
        analyzer = mapperService.getIndexAnalyzers().get(perFieldAnalyzer.get(field).toString());
    } else {
        analyzer = mapperService.fullName(field).indexAnalyzer();
    }
    if (analyzer == null) {
        analyzer = mapperService.getIndexAnalyzers().getDefaultIndexAnalyzer();
    }
    return analyzer;
}
Also used: Analyzer (org.apache.lucene.analysis.Analyzer), MapperService (org.elasticsearch.index.mapper.MapperService)
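
The method resolves an analyzer in three steps: an explicit per-field override, then the analyzer configured on the field mapping, then the index default. A minimal sketch of the same MapperService calls used on their own (the field names here are assumptions, not values from the snippet):

// Sketch only: resolve index-time analyzers for a few fields with the same
// fallback chain as getAnalyzerAtField; "title" and "body" are hypothetical.
MapperService mapperService = indexShard.mapperService();
Map<String, Analyzer> analyzers = new HashMap<>();
for (String field : Arrays.asList("title", "body")) {
    MappedFieldType fieldType = mapperService.fullName(field);
    Analyzer analyzer = fieldType == null ? null : fieldType.indexAnalyzer();
    if (analyzer == null) {
        // last resort: the index's default index-time analyzer
        analyzer = mapperService.getIndexAnalyzers().getDefaultIndexAnalyzer();
    }
    analyzers.put(field, analyzer);
}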

Example 22 with MapperService

Use of org.elasticsearch.index.mapper.MapperService in project elasticsearch by elastic.

From the class RangeQueryBuilder, the method getRelation:

// Overridable for testing only
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
    IndexReader reader = queryRewriteContext.getIndexReader();
    // if there is no reader we cannot rewrite, so just pretend there is an intersection so that the rewrite is a noop
    if (reader == null) {
        return MappedFieldType.Relation.INTERSECTS;
    }
    final MapperService mapperService = queryRewriteContext.getMapperService();
    final MappedFieldType fieldType = mapperService.fullName(fieldName);
    if (fieldType == null) {
        // no field means we have no values
        return MappedFieldType.Relation.DISJOINT;
    } else {
        DateMathParser dateMathParser = format == null ? null : new DateMathParser(format);
        return fieldType.isFieldWithinQuery(queryRewriteContext.getIndexReader(), from, to, includeLower, includeUpper, timeZone, dateMathParser, queryRewriteContext);
    }
}
Also used: IndexReader (org.apache.lucene.index.IndexReader), MappedFieldType (org.elasticsearch.index.mapper.MappedFieldType), DateMathParser (org.elasticsearch.common.joda.DateMathParser), MapperService (org.elasticsearch.index.mapper.MapperService)
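
The Relation value is what makes this shard-level check useful during query rewriting. A hedged sketch of how a rewrite method inside the builder might act on it (the actual RangeQueryBuilder rewrite logic may differ in detail):

// Sketch only, not the exact rewrite logic of RangeQueryBuilder.
MappedFieldType.Relation relation = getRelation(queryRewriteContext);
switch (relation) {
    case DISJOINT:
        // no value on this shard can match: rewrite to a query that matches nothing
        return new MatchNoneQueryBuilder();
    case WITHIN:
        // every value on this shard falls inside the range: the bounds can be dropped
        return new RangeQueryBuilder(fieldName);
    case INTERSECTS:
    default:
        // nothing can be decided up front; keep the original query
        return this;
}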

Example 23 with MapperService

Use of org.elasticsearch.index.mapper.MapperService in project elasticsearch by elastic.

From the class CompletionSuggestionBuilder, the method build:

@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
    CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context);
    // copy over common settings to each suggestion builder
    final MapperService mapperService = context.getMapperService();
    populateCommonFields(mapperService, suggestionContext);
    suggestionContext.setFuzzyOptions(fuzzyOptions);
    suggestionContext.setRegexOptions(regexOptions);
    MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField());
    if (mappedFieldType == null || mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false) {
        throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field");
    }
    if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
        CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
        suggestionContext.setFieldType(type);
        if (type.hasContextMappings() && contextBytes != null) {
            try (XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(context.getXContentRegistry(), contextBytes)) {
                if (type.hasContextMappings() && contextParser != null) {
                    ContextMappings contextMappings = type.getContextMappings();
                    contextParser.nextToken();
                    Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = new HashMap<>(contextMappings.size());
                    assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
                    XContentParser.Token currentToken;
                    String currentFieldName;
                    while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (currentToken == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = contextParser.currentName();
                            final ContextMapping mapping = contextMappings.get(currentFieldName);
                            queryContexts.put(currentFieldName, mapping.parseQueryContext(context.newParseContext(contextParser)));
                        }
                    }
                    suggestionContext.setQueryContexts(queryContexts);
                }
            }
        } else if (contextBytes != null) {
            throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
        }
    }
    assert suggestionContext.getFieldType() != null : "no completion field type set";
    return suggestionContext;
}
Also used: HashMap (java.util.HashMap), CompletionFieldMapper (org.elasticsearch.index.mapper.CompletionFieldMapper), ContextMappings (org.elasticsearch.search.suggest.completion.context.ContextMappings), ContextMapping (org.elasticsearch.search.suggest.completion.context.ContextMapping), MappedFieldType (org.elasticsearch.index.mapper.MappedFieldType), List (java.util.List), MapperService (org.elasticsearch.index.mapper.MapperService), XContentParser (org.elasticsearch.common.xcontent.XContentParser)
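
Most of the method is context parsing; the part that actually involves MapperService is the completion-field check at the top. That check can be read as a small standalone helper (a sketch, not code from the builder):

// Sketch only: verify that a field is mapped as a completion field before
// building a suggestion for it; instanceof also rejects a null field type.
static CompletionFieldMapper.CompletionFieldType requireCompletionField(MapperService mapperService, String field) {
    MappedFieldType fieldType = mapperService.fullName(field);
    if (fieldType instanceof CompletionFieldMapper.CompletionFieldType == false) {
        throw new IllegalArgumentException("Field [" + field + "] is not a completion suggest field");
    }
    return (CompletionFieldMapper.CompletionFieldType) fieldType;
}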

Example 24 with MapperService

Use of org.elasticsearch.index.mapper.MapperService in project elasticsearch by elastic.

From the class PhraseSuggestionBuilder, the method build:

@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
    PhraseSuggestionContext suggestionContext = new PhraseSuggestionContext(context);
    MapperService mapperService = context.getMapperService();
    // copy over common settings to each suggestion builder
    populateCommonFields(mapperService, suggestionContext);
    suggestionContext.setSeparator(BytesRefs.toBytesRef(this.separator));
    suggestionContext.setRealWordErrorLikelihood(this.realWordErrorLikelihood);
    suggestionContext.setConfidence(this.confidence);
    suggestionContext.setMaxErrors(this.maxErrors);
    suggestionContext.setSeparator(BytesRefs.toBytesRef(this.separator));
    suggestionContext.setRequireUnigram(this.forceUnigrams);
    suggestionContext.setTokenLimit(this.tokenLimit);
    suggestionContext.setPreTag(BytesRefs.toBytesRef(this.preTag));
    suggestionContext.setPostTag(BytesRefs.toBytesRef(this.postTag));
    if (this.gramSize != null) {
        suggestionContext.setGramSize(this.gramSize);
    }
    for (List<CandidateGenerator> candidateGenerators : this.generators.values()) {
        for (CandidateGenerator candidateGenerator : candidateGenerators) {
            suggestionContext.addGenerator(candidateGenerator.build(mapperService));
        }
    }
    if (this.model != null) {
        suggestionContext.setModel(this.model.buildWordScorerFactory());
    }
    if (this.collateQuery != null) {
        Function<Map<String, Object>, ExecutableScript> compiledScript = context.getLazyExecutableScript(this.collateQuery, ScriptContext.Standard.SEARCH);
        suggestionContext.setCollateQueryScript(compiledScript);
        if (this.collateParams != null) {
            suggestionContext.setCollateScriptParams(this.collateParams);
        }
        suggestionContext.setCollatePrune(this.collatePrune);
    }
    if (this.gramSize == null || suggestionContext.generators().isEmpty()) {
        final ShingleTokenFilterFactory.Factory shingleFilterFactory = getShingleFilterFactory(suggestionContext.getAnalyzer());
        if (this.gramSize == null) {
            // try to detect the shingle size
            if (shingleFilterFactory != null) {
                suggestionContext.setGramSize(shingleFilterFactory.getMaxShingleSize());
                if (suggestionContext.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams()) {
                    throw new IllegalArgumentException("The default analyzer for field: [" + suggestionContext.getField() + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly");
                }
            }
        }
        if (suggestionContext.generators().isEmpty()) {
            if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams() && suggestionContext.getRequireUnigram()) {
                throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + suggestionContext.getField() + "] since it doesn't emit unigrams. " + "If this is intentional try to set the candidate generator field explicitly");
            }
            // use a default generator on the same field
            DirectCandidateGenerator generator = new DirectCandidateGenerator();
            generator.setField(suggestionContext.getField());
            suggestionContext.addGenerator(generator);
        }
    }
    return suggestionContext;
}
Also used: DirectCandidateGenerator (org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator), ShingleTokenFilterFactory (org.elasticsearch.index.analysis.ShingleTokenFilterFactory), ExecutableScript (org.elasticsearch.script.ExecutableScript), HashMap (java.util.HashMap), Map (java.util.Map), MapperService (org.elasticsearch.index.mapper.MapperService)
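
On the request side this build method is driven by a PhraseSuggestionBuilder assembled by the client. A hedged usage sketch, assuming the 5.x-era suggest API (SuggestBuilders.phraseSuggestion, addSuggestion and DirectCandidateGeneratorBuilder are assumptions if your version differs):

// Hedged request-side sketch; the field and suggestion names are hypothetical.
SuggestBuilder suggest = new SuggestBuilder()
    .addSuggestion("did-you-mean",
        SuggestBuilders.phraseSuggestion("body")                  // field to suggest on
            .text("noble prize")                                  // misspelled input text
            .gramSize(2)                                          // otherwise detected from the shingle filter, as above
            .maxErrors(0.5f)
            .addCandidateGenerator(new DirectCandidateGeneratorBuilder("body")));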

Example 25 with MapperService

Use of org.elasticsearch.index.mapper.MapperService in project elasticsearch by elastic.

From the class StoredNumericValuesTests, the method testBytesAndNumericRepresentation:

public void testBytesAndNumericRepresentation() throws Exception {
    IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
            .startObject("field1").field("type", "byte").field("store", true).endObject()
            .startObject("field2").field("type", "short").field("store", true).endObject()
            .startObject("field3").field("type", "integer").field("store", true).endObject()
            .startObject("field4").field("type", "float").field("store", true).endObject()
            .startObject("field5").field("type", "long").field("store", true).endObject()
            .startObject("field6").field("type", "double").field("store", true).endObject()
            .startObject("field7").field("type", "ip").field("store", true).endObject()
            .startObject("field8").field("type", "ip").field("store", true).endObject()
            .startObject("field9").field("type", "date").field("store", true).endObject()
            .startObject("field10").field("type", "boolean").field("store", true).endObject()
            .endObject().endObject().endObject().string();
    MapperService mapperService = createIndex("test").mapperService();
    DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
    ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
            .field("field1", 1)
            .field("field2", 1)
            .field("field3", 1)
            .field("field4", 1.1)
            .startArray("field5").value(1).value(2).value(3).endArray()
            .field("field6", 1.1)
            .field("field7", "192.168.1.1")
            .field("field8", "2001:db8::2:1")
            .field("field9", "2016-04-05")
            .field("field10", true)
            .endObject().bytes());
    writer.addDocument(doc.rootDoc());
    DirectoryReader reader = DirectoryReader.open(writer);
    IndexSearcher searcher = new IndexSearcher(reader);
    CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(Collections.emptySet(), Collections.singletonList("field*"), false);
    searcher.doc(0, fieldsVisitor);
    fieldsVisitor.postProcess(mapperService);
    assertThat(fieldsVisitor.fields().size(), equalTo(10));
    assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field1").get(0), equalTo((byte) 1));
    assertThat(fieldsVisitor.fields().get("field2").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field2").get(0), equalTo((short) 1));
    assertThat(fieldsVisitor.fields().get("field3").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field3").get(0), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field4").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field4").get(0), equalTo(1.1f));
    assertThat(fieldsVisitor.fields().get("field5").size(), equalTo(3));
    assertThat(fieldsVisitor.fields().get("field5").get(0), equalTo(1L));
    assertThat(fieldsVisitor.fields().get("field5").get(1), equalTo(2L));
    assertThat(fieldsVisitor.fields().get("field5").get(2), equalTo(3L));
    assertThat(fieldsVisitor.fields().get("field6").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field6").get(0), equalTo(1.1));
    assertThat(fieldsVisitor.fields().get("field7").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field7").get(0), equalTo("192.168.1.1"));
    assertThat(fieldsVisitor.fields().get("field8").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field8").get(0), equalTo("2001:db8::2:1"));
    assertThat(fieldsVisitor.fields().get("field9").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field9").get(0), equalTo("2016-04-05T00:00:00.000Z"));
    assertThat(fieldsVisitor.fields().get("field10").size(), equalTo(1));
    assertThat(fieldsVisitor.fields().get("field10").get(0), equalTo(true));
    reader.close();
    writer.close();
}
Also used: IndexSearcher (org.apache.lucene.search.IndexSearcher), ParsedDocument (org.elasticsearch.index.mapper.ParsedDocument), IndexWriter (org.apache.lucene.index.IndexWriter), DirectoryReader (org.apache.lucene.index.DirectoryReader), CustomFieldsVisitor (org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor), DocumentMapper (org.elasticsearch.index.mapper.DocumentMapper), CompressedXContent (org.elasticsearch.common.compress.CompressedXContent), RAMDirectory (org.apache.lucene.store.RAMDirectory), MapperService (org.elasticsearch.index.mapper.MapperService), IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig)
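
The MapperService interaction worth noticing here is fieldsVisitor.postProcess(mapperService), which converts the raw stored values back into the mapped Java types asserted above. A small variation on the same pattern (a sketch under the same test setup; the exact meaning of the third constructor flag is an assumption):

// Sketch only: load two stored fields by exact name instead of the "field*" pattern.
CustomFieldsVisitor visitor = new CustomFieldsVisitor(
        new HashSet<>(Arrays.asList("field1", "field9")),   // exact stored field names to load
        Collections.emptyList(),                             // no wildcard patterns
        false);                                              // assumed: do not also load _source
searcher.doc(0, visitor);
// MapperService turns the raw stored bytes back into mapped types (Byte, date string, ...)
visitor.postProcess(mapperService);
Map<String, List<Object>> fields = visitor.fields();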

Aggregations

MapperService (org.elasticsearch.index.mapper.MapperService): 46
Settings (org.elasticsearch.common.settings.Settings): 16
DocumentMapper (org.elasticsearch.index.mapper.DocumentMapper): 14
IndexSettings (org.elasticsearch.index.IndexSettings): 13
CompressedXContent (org.elasticsearch.common.compress.CompressedXContent): 12
IOException (java.io.IOException): 10
Store (org.elasticsearch.index.store.Store): 9
Matchers.containsString (org.hamcrest.Matchers.containsString): 9
IndexMetadata (org.elasticsearch.cluster.metadata.IndexMetadata): 8
Index (org.elasticsearch.index.Index): 8
ParsedDocument (org.elasticsearch.index.mapper.ParsedDocument): 8
HashMap (java.util.HashMap): 7
Map (java.util.Map): 7
IndexService (org.elasticsearch.index.IndexService): 7
AtomicLong (java.util.concurrent.atomic.AtomicLong): 6
IndexAnalyzers (org.elasticsearch.index.analysis.IndexAnalyzers): 6
Collections (java.util.Collections): 5
HashSet (java.util.HashSet): 5
List (java.util.List): 5
Analyzer (org.apache.lucene.analysis.Analyzer): 5