Use of org.elasticsearch.common.compress.CompressedXContent in project elasticsearch by elastic.
In the class PreBuiltAnalyzerTests, the method testThatAnalyzersAreUsedInMapping:
public void testThatAnalyzersAreUsedInMapping() throws IOException {
    int randomInt = randomInt(PreBuiltAnalyzers.values().length - 1);
    PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt];
    String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT);
    Version randomVersion = randomVersion(random());
    Settings indexSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion)
        .build();
    NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX,
        randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get();
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field")
        .field("type", "text")
        .field("analyzer", analyzerName)
        .endObject().endObject().endObject().endObject().string();
    DocumentMapper docMapper = createIndex("test", indexSettings).mapperService()
        .documentMapperParser().parse("type", new CompressedXContent(mapping));
    FieldMapper fieldMapper = docMapper.mappers().getMapper("field");
    assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class));
    NamedAnalyzer fieldMapperNamedAnalyzer = fieldMapper.fieldType().searchAnalyzer();
    assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer()));
}
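Every example on this page follows the same pattern: build a JSON mapping, wrap it in a CompressedXContent, and hand it to the mapper parser. A minimal sketch of just the wrapper's round trip, assuming the String constructor and the string() accessor seen elsewhere in this version of the codebase:

import org.elasticsearch.common.compress.CompressedXContent;

public class CompressedXContentRoundTrip {
    public static void main(String[] args) throws Exception {
        // The constructor takes raw JSON; the bytes are stored compressed internally.
        String json = "{\"type\":{\"properties\":{\"field\":{\"type\":\"text\"}}}}";
        CompressedXContent compressed = new CompressedXContent(json);

        // string() decompresses on demand, so the original JSON round-trips.
        if (!json.equals(compressed.string())) {
            throw new AssertionError("round trip failed");
        }
    }
}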
Use of org.elasticsearch.common.compress.CompressedXContent in project elasticsearch by elastic.
In the class ParentChildFieldDataTests, the method setupData:
@Before
public void setupData() throws Exception {
    mapperService.merge(childType,
        new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(
            childType, "_parent", "type=" + parentType).string()),
        MapperService.MergeReason.MAPPING_UPDATE, false);
    mapperService.merge(grandChildType,
        new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(
            grandChildType, "_parent", "type=" + childType).string()),
        MapperService.MergeReason.MAPPING_UPDATE, false);

    // Index a small family tree: parent "1" has children "2", "3" and "5";
    // parent "2" has child "4"; grand-child "6" hangs off child "2"; plus
    // one unrelated "other-type" document.
    Document d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(parentType, "1"), Field.Store.NO));
    d.add(createJoinField(parentType, "1"));
    writer.addDocument(d);

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(childType, "2"), Field.Store.NO));
    d.add(new StringField(ParentFieldMapper.NAME, Uid.createUid(parentType, "1"), Field.Store.NO));
    d.add(createJoinField(parentType, "1"));
    d.add(createJoinField(childType, "2"));
    writer.addDocument(d);
    writer.commit();

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(childType, "3"), Field.Store.NO));
    d.add(new StringField(ParentFieldMapper.NAME, Uid.createUid(parentType, "1"), Field.Store.NO));
    d.add(createJoinField(parentType, "1"));
    d.add(createJoinField(childType, "3"));
    writer.addDocument(d);

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(parentType, "2"), Field.Store.NO));
    d.add(createJoinField(parentType, "2"));
    writer.addDocument(d);

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(childType, "4"), Field.Store.NO));
    d.add(new StringField(ParentFieldMapper.NAME, Uid.createUid(parentType, "2"), Field.Store.NO));
    d.add(createJoinField(parentType, "2"));
    d.add(createJoinField(childType, "4"));
    writer.addDocument(d);

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(childType, "5"), Field.Store.NO));
    d.add(new StringField(ParentFieldMapper.NAME, Uid.createUid(parentType, "1"), Field.Store.NO));
    d.add(createJoinField(parentType, "1"));
    d.add(createJoinField(childType, "5"));
    writer.addDocument(d);
    writer.commit();

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid(grandChildType, "6"), Field.Store.NO));
    d.add(new StringField(ParentFieldMapper.NAME, Uid.createUid(childType, "2"), Field.Store.NO));
    d.add(createJoinField(childType, "2"));
    writer.addDocument(d);

    d = new Document();
    d.add(new StringField(UidFieldMapper.NAME, Uid.createUid("other-type", "1"), Field.Store.NO));
    writer.addDocument(d);
}
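The createJoinField helper belongs to the test class but is not shown in this excerpt. A plausible reconstruction, assuming the join field is indexed as sorted doc values under the name produced by ParentFieldMapper.joinField in this version of the codebase (both details are assumptions here):

import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.ParentFieldMapper;

// Hypothetical sketch of the helper used above: a doc-values field named
// after the parent type, holding the id of the document being joined to.
private SortedDocValuesField createJoinField(String parentType, String id) {
    return new SortedDocValuesField(ParentFieldMapper.joinField(parentType), new BytesRef(id));
}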
Use of org.elasticsearch.common.compress.CompressedXContent in project elasticsearch by elastic.
In the class DateFieldMapperTests, the method testTimeZoneParsing:
/**
* Test that time zones are correctly parsed by the {@link DateFieldMapper}.
* There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373.
*/
public void testTimeZoneParsing() throws Exception {
    final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'");
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field")
        .field("type", "date")
        .field("format", timeZonePattern)
        .endObject().endObject().endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, mapper.mappingSource().toString());

    final DateTimeZone randomTimeZone = randomBoolean()
        ? DateTimeZone.forID(randomFrom("UTC", "CET"))
        : randomDateTimeZone();
    final DateTime randomDate = new DateTime(2016, 3, 11, 0, 0, 0, randomTimeZone);
    ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate))
        .endObject()
        .bytes());

    IndexableField[] fields = doc.rootDoc().getFields("field");
    assertEquals(2, fields.length);
    assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue());
}
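The expectation of exactly two IndexableFields reflects how the date mapper indexes a value: a numeric point for range queries plus a doc-values field for sorting and aggregations. A stripped-down sketch of the equivalent raw Lucene indexing (the field name, epoch value, and the exact field types are illustrative assumptions):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;

class DateFieldSketch {
    // A mapped date becomes two Lucene fields, which is why the test above
    // asserts fields.length == 2.
    static Document indexDate(String name, long epochMillis) {
        Document doc = new Document();
        doc.add(new LongPoint(name, epochMillis));                   // indexed point, for range queries
        doc.add(new SortedNumericDocValuesField(name, epochMillis)); // doc values, for sort/aggs
        return doc;
    }
}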
Use of org.elasticsearch.common.compress.CompressedXContent in project elasticsearch by elastic.
In the class DocumentMapperMergeTests, the method testConcurrentMergeTest:
public void testConcurrentMergeTest() throws Throwable {
    final MapperService mapperService = createIndex("test").mapperService();
    mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false);
    final DocumentMapper documentMapper = mapperService.documentMapper("test");
    DocumentFieldMappers dfm = documentMapper.mappers();
    try {
        assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
        fail();
    } catch (IllegalArgumentException e) {
        // ok, that's expected
    }

    final AtomicBoolean stopped = new AtomicBoolean(false);
    final CyclicBarrier barrier = new CyclicBarrier(2);
    final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>();
    final AtomicReference<Exception> error = new AtomicReference<>();
    final Thread updater = new Thread() {
        @Override
        public void run() {
            try {
                barrier.await();
                for (int i = 0; i < 200 && stopped.get() == false; i++) {
                    final String fieldName = Integer.toString(i);
                    ParsedDocument doc = documentMapper.parse("test", "test", fieldName,
                        new BytesArray("{ \"" + fieldName + "\" : \"test\" }"));
                    Mapping update = doc.dynamicMappingsUpdate();
                    assert update != null;
                    lastIntroducedFieldName.set(fieldName);
                    mapperService.merge("test", new CompressedXContent(update.toString()),
                        MapperService.MergeReason.MAPPING_UPDATE, false);
                }
            } catch (Exception e) {
                error.set(e);
            } finally {
                stopped.set(true);
            }
        }
    };
    updater.start();
    try {
        barrier.await();
        while (stopped.get() == false) {
            final String fieldName = lastIntroducedFieldName.get();
            final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }");
            ParsedDocument parsedDoc = documentMapper.parse("test", "test", "random", source);
            if (parsedDoc.dynamicMappingsUpdate() != null) {
                // not in the mapping yet, try again
                continue;
            }
            dfm = documentMapper.mappers();
            assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
        }
    } finally {
        stopped.set(true);
        updater.join();
    }
    if (error.get() != null) {
        throw error.get();
    }
}
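The synchronization skeleton in that test is reusable on its own: a CyclicBarrier releases both threads at the same instant, and a shared AtomicBoolean lets either side end the loop. A stripped-down sketch of just that pattern, with the loop bodies left as placeholders:

import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;

public class BarrierLoopSketch {
    public static void main(String[] args) throws Exception {
        final AtomicBoolean stopped = new AtomicBoolean(false);
        final CyclicBarrier barrier = new CyclicBarrier(2); // updater + main thread

        Thread updater = new Thread(() -> {
            try {
                barrier.await(); // both sides start working at the same moment
                for (int i = 0; i < 200 && stopped.get() == false; i++) {
                    // ... publish one unit of work per iteration ...
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                stopped.set(true); // the reader loop below observes this and exits
            }
        });
        updater.start();

        barrier.await();
        while (stopped.get() == false) {
            // ... read the most recently published work ...
        }
        updater.join();
    }
}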
Use of org.elasticsearch.common.compress.CompressedXContent in project elasticsearch by elastic.
In the class DocumentMapperMergeTests, the method testDoNotRepeatOriginalMapping:
public void testDoNotRepeatOriginalMapping() throws IOException {
    CompressedXContent mapping = new CompressedXContent(XContentFactory.jsonBuilder()
        .startObject().startObject("type")
        .startObject("_source")
        .field("enabled", false)
        .endObject()
        .endObject().endObject().bytes());
    MapperService mapperService = createIndex("test").mapperService();
    mapperService.merge("type", mapping, MapperService.MergeReason.MAPPING_UPDATE, false);

    CompressedXContent update = new CompressedXContent(XContentFactory.jsonBuilder()
        .startObject().startObject("type")
        .startObject("properties").startObject("foo")
        .field("type", "text")
        .endObject().endObject()
        .endObject().endObject().bytes());
    DocumentMapper mapper = mapperService.merge("type", update, MapperService.MergeReason.MAPPING_UPDATE, false);

    assertNotNull(mapper.mappers().getMapper("foo"));
    assertFalse(mapper.sourceMapper().enabled());
}
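What the merge buys you here, spelled out: the update never mentions _source, yet the merged mapper still reports it disabled, because a mapping update is applied cumulatively on top of the existing mapping rather than replacing it. A quick way to inspect the combined result, sketched against the same MapperService API used above:

// Sketch (inside the same test): the merged mapping source should contain both
// the original "_source": {"enabled": false} and the new "foo" field; the
// exact JSON layout is an implementation detail.
DocumentMapper merged = mapperService.documentMapper("type");
System.out.println(merged.mappingSource().toString());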