Use of org.elasticsearch.common.compress.CompressedXContent in the elastic/elasticsearch project:
the internalMerge method of the MapperService class.
/**
 * Parses the given mapping sources and merges them into this mapper service.
 * The {@code _default_} mapping, if present in the batch, is parsed first so
 * the remaining types can be parsed against it.
 *
 * @param mappings       mapping sources keyed by type name
 * @param reason         why this merge is happening (recovery vs. update)
 * @param updateAllTypes whether conflicting field updates apply to all types
 * @return the merged document mappers, keyed by type
 * @throws MapperParsingException if any mapping source fails to parse
 */
private synchronized Map<String, DocumentMapper> internalMerge(Map<String, CompressedXContent> mappings, MergeReason reason, boolean updateAllTypes) {
    DocumentMapper defaultMapper = null;
    String defaultMappingSource = null;
    if (mappings.containsKey(DEFAULT_MAPPING)) {
        // NOTE: never apply the default here
        try {
            defaultMapper = documentParser.parse(DEFAULT_MAPPING, mappings.get(DEFAULT_MAPPING));
        } catch (Exception e) {
            throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, DEFAULT_MAPPING, e.getMessage());
        }
        try {
            defaultMappingSource = mappings.get(DEFAULT_MAPPING).string();
        } catch (IOException e) {
            throw new ElasticsearchGenerationException("failed to un-compress", e);
        }
    }
    // Prefer the default mapping supplied in this batch; otherwise fall back
    // to the last default source this service has stored.
    final String effectiveDefaultSource = defaultMappingSource != null ? defaultMappingSource : this.defaultMappingSource;
    final List<DocumentMapper> parsedMappers = new ArrayList<>();
    for (Map.Entry<String, CompressedXContent> mappingEntry : mappings.entrySet()) {
        final String mappingType = mappingEntry.getKey();
        if (mappingType.equals(DEFAULT_MAPPING)) {
            // Handled above; never merge the default into itself.
            continue;
        }
        // The default was already applied if we are recovering, and it is
        // only applied to types this service has not seen before.
        final boolean applyDefault = reason != MergeReason.MAPPING_RECOVERY && mappers.containsKey(mappingType) == false;
        try {
            parsedMappers.add(documentParser.parse(mappingType, mappingEntry.getValue(), applyDefault ? effectiveDefaultSource : null));
        } catch (Exception e) {
            throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, mappingType, e.getMessage());
        }
    }
    return internalMerge(defaultMapper, defaultMappingSource, parsedMappers, reason, updateAllTypes);
}
Use of org.elasticsearch.common.compress.CompressedXContent in the elastic/elasticsearch project:
the assertSerialization method of the MapperService class.
/**
 * Verifies that re-parsing a mapper's serialized mapping reproduces the same
 * serialized source, i.e. that serialization round-trips losslessly.
 * Intended to be invoked via {@code assert assertSerialization(mapper)}.
 *
 * @param mapper the mapper whose serialization is checked
 * @return {@code true} when the round-trip matches (otherwise throws)
 * @throws IllegalStateException if the re-parsed source differs
 */
private boolean assertSerialization(DocumentMapper mapper) {
    // Snapshot the serialized source now; concurrent parsing may change it.
    final CompressedXContent originalSource = mapper.mappingSource();
    final DocumentMapper reparsed = parse(mapper.type(), originalSource, false);
    final CompressedXContent roundTripped = reparsed.mappingSource();
    if (!roundTripped.equals(originalSource)) {
        throw new IllegalStateException("DocumentMapper serialization result is different from source. \n--> Source [" + originalSource + "]\n--> Result [" + roundTripped + "]");
    }
    return true;
}
Use of org.elasticsearch.common.compress.CompressedXContent in the elastic/elasticsearch project:
the testMappingClusterStateUpdateDoesntChangeExistingIndices method of the MetaDataMappingServiceTests class.
/**
 * Verifies that applying a put-mapping cluster state update does not mutate
 * the mapper service of an already-existing index instance: the update must
 * produce a new cluster state rather than change live index services in place.
 */
public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception {
    final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
    // Capture the mapping source before the update so we can detect mutation.
    final CompressedXContent currentMapping = indexService.mapperService().documentMapper("type").mappingSource();
    final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class);
    final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
    // TODO - it will be nice to get a random mapping generator
    final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
    // BUGFIX: the source previously read "{ \"properties\" { ... }}" (missing
    // colon), which is invalid JSON — the put-mapping would fail to parse and
    // the assertion below would pass vacuously. Use a valid mapping so the
    // update actually executes and the non-mutation check is meaningful.
    request.source("{ \"properties\": { \"field\": { \"type\": \"string\" }}}");
    mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
    // The live index service must still expose the original mapping source.
    assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping));
}
Use of org.elasticsearch.common.compress.CompressedXContent in the elastic/elasticsearch project:
the testIndexingWithMultipleContexts method of the CategoryContextMappingTests class.
/**
 * Indexes a completion field configured with two category contexts ("ctx" and
 * "type") and checks that each suggestion input produces a suggest field.
 */
public void testIndexingWithMultipleContexts() throws Exception {
    // Mapping: a "completion" field with two category contexts.
    final String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
        .startObject("completion")
            .field("type", "completion")
            .startArray("contexts")
                .startObject().field("name", "ctx").field("type", "category").endObject()
                .startObject().field("name", "type").field("type", "category").endObject()
            .endArray()
        .endObject()
        .endObject().endObject().endObject().string();
    final DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
    final FieldMapper completionMapper = mapper.mappers().getMapper("completion");
    final MappedFieldType completionType = completionMapper.fieldType();
    // Document: three inputs sharing a weight, with values for both contexts.
    final XContentBuilder doc = jsonBuilder().startObject().startArray("completion")
        .startObject()
            .array("input", "suggestion5", "suggestion6", "suggestion7")
            .field("weight", 5)
            .startObject("contexts")
                .array("ctx", "ctx1", "ctx2", "ctx3")
                .array("type", "typr3", "ftg")
            .endObject()
        .endObject()
    .endArray().endObject();
    final ParsedDocument parsed = mapper.parse("test", "type1", "1", doc.bytes());
    final IndexableField[] suggestFields = parsed.rootDoc().getFields(completionType.name());
    // Three inputs -> three suggest fields.
    assertContextSuggestFields(suggestFields, 3);
}
Use of org.elasticsearch.common.compress.CompressedXContent in the elastic/elasticsearch project:
the testIndexingWithNoContexts method of the CategoryContextMappingTests class.
/**
 * Indexes a completion field that declares a category context but supplies no
 * context values in the document; every input should still be indexed.
 */
public void testIndexingWithNoContexts() throws Exception {
    // Mapping: a "completion" field with a single category context "ctx".
    final String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
        .startObject("completion")
            .field("type", "completion")
            .startArray("contexts")
                .startObject().field("name", "ctx").field("type", "category").endObject()
            .endArray()
        .endObject()
        .endObject().endObject().endObject().string();
    final DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
    final FieldMapper completionMapper = mapper.mappers().getMapper("completion");
    final MappedFieldType completionType = completionMapper.fieldType();
    // Document: three completion entries (2 + 2 + 3 inputs), no contexts given.
    final XContentBuilder doc = jsonBuilder().startObject().startArray("completion")
        .startObject().array("input", "suggestion1", "suggestion2").field("weight", 3).endObject()
        .startObject().array("input", "suggestion3", "suggestion4").field("weight", 4).endObject()
        .startObject().array("input", "suggestion5", "suggestion6", "suggestion7").field("weight", 5).endObject()
    .endArray().endObject();
    final ParsedDocument parsed = mapper.parse("test", "type1", "1", doc.bytes());
    final IndexableField[] suggestFields = parsed.rootDoc().getFields(completionType.name());
    // Seven inputs in total -> seven suggest fields.
    assertContextSuggestFields(suggestFields, 7);
}
Aggregations