Use of org.elasticsearch.index.mapper.Mapping in project elasticsearch by elastic.
Class TransportShardBulkAction, method executeIndexRequestOnReplica.
/**
 * Execute the given {@link IndexRequest} on a replica shard, throwing a
 * {@link RetryOnReplicaException} if the operation needs to be re-tried.
 */
public static Engine.IndexResult executeIndexRequestOnReplica(DocWriteResponse primaryResponse, IndexRequest request, IndexShard replica) throws IOException {
    final ShardId shardId = replica.shardId();
    SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, shardId.getIndexName(),
            request.type(), request.id(), request.source(), request.getContentType())
        .routing(request.routing())
        .parent(request.parent());
    final Engine.Index operation;
    final long version = primaryResponse.getVersion();
    final VersionType versionType = request.versionType().versionTypeForReplicationAndRecovery();
    assert versionType.validateVersionForWrites(version);
    final long seqNo = primaryResponse.getSeqNo();
    try {
        operation = replica.prepareIndexOnReplica(sourceToParse, seqNo, version, versionType,
            request.getAutoGeneratedTimestamp(), request.isRetry());
    } catch (MapperParsingException e) {
        return new Engine.IndexResult(e, version, seqNo);
    }
    Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
    if (update != null) {
        // The document triggered a dynamic mapping update that this replica has not seen yet,
        // so the operation must be retried once the mapping arrives.
        throw new RetryOnReplicaException(shardId,
            "Mappings are not available on the replica yet, triggered update: " + update);
    }
    return replica.index(operation);
}
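The RetryOnReplicaException above signals that the replica's mappings lag behind the primary. A minimal, hypothetical caller sketch (not the actual transport layer; waitForMappingUpdate() is an assumed helper) of what reacting to that signal might look like:

// Hypothetical retry loop around the method above; the real replication layer
// handles RetryOnReplicaException differently. waitForMappingUpdate() is an assumed helper.
Engine.IndexResult result;
while (true) {
    try {
        result = TransportShardBulkAction.executeIndexRequestOnReplica(primaryResponse, request, replica);
        break;
    } catch (RetryOnReplicaException e) {
        waitForMappingUpdate(); // block until the cluster state carrying the new mapping is applied
    }
}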
Use of org.elasticsearch.index.mapper.Mapping in project elasticsearch by elastic.
Class InternalEngineTests, method dynamicUpdate.
private Mapping dynamicUpdate() {
    BuilderContext context = new BuilderContext(
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(),
        new ContentPath());
    final RootObjectMapper root = new RootObjectMapper.Builder("some_type").build(context);
    return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
}
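A Mapping built this way serializes to mapping-source JSON via toString(), which can be fed back through MapperService.merge just as the concurrent-merge test below does. A minimal sketch, assuming a mapperService such as createIndex("test").mapperService() and an illustrative type name:

// Sketch only: merge a programmatically built Mapping; mapperService and the type name are assumed.
Mapping update = dynamicUpdate();
mapperService.merge("some_type",
    new CompressedXContent(update.toString()),
    MapperService.MergeReason.MAPPING_UPDATE, false);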
Use of org.elasticsearch.index.mapper.Mapping in project elasticsearch by elastic.
Class DocumentMapperMergeTests, method testConcurrentMergeTest.
public void testConcurrentMergeTest() throws Throwable {
    final MapperService mapperService = createIndex("test").mapperService();
    mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false);
    final DocumentMapper documentMapper = mapperService.documentMapper("test");
    DocumentFieldMappers dfm = documentMapper.mappers();
    try {
        assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
        fail();
    } catch (IllegalArgumentException e) {
        // ok that's expected
    }
    final AtomicBoolean stopped = new AtomicBoolean(false);
    final CyclicBarrier barrier = new CyclicBarrier(2);
    final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>();
    final AtomicReference<Exception> error = new AtomicReference<>();
    final Thread updater = new Thread() {

        @Override
        public void run() {
            try {
                barrier.await();
                for (int i = 0; i < 200 && stopped.get() == false; i++) {
                    final String fieldName = Integer.toString(i);
                    ParsedDocument doc = documentMapper.parse("test", "test", fieldName,
                        new BytesArray("{ \"" + fieldName + "\" : \"test\" }"));
                    Mapping update = doc.dynamicMappingsUpdate();
                    assert update != null;
                    lastIntroducedFieldName.set(fieldName);
                    mapperService.merge("test", new CompressedXContent(update.toString()),
                        MapperService.MergeReason.MAPPING_UPDATE, false);
                }
            } catch (Exception e) {
                error.set(e);
            } finally {
                stopped.set(true);
            }
        }
    };
    updater.start();
    try {
        barrier.await();
        while (stopped.get() == false) {
            final String fieldName = lastIntroducedFieldName.get();
            final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }");
            ParsedDocument parsedDoc = documentMapper.parse("test", "test", "random", source);
            if (parsedDoc.dynamicMappingsUpdate() != null) {
                // not in the mapping yet, try again
                continue;
            }
            dfm = documentMapper.mappers();
            assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
        }
    } finally {
        stopped.set(true);
        updater.join();
    }
    if (error.get() != null) {
        throw error.get();
    }
}
Use of org.elasticsearch.index.mapper.Mapping in project crate by crate.
Class TransportShardUpsertAction, method updateMappingIfRequired.
private Engine.IndexingOperation updateMappingIfRequired(ShardUpsertRequest request,
                                                         ShardUpsertRequest.Item item,
                                                         long version,
                                                         IndexShard indexShard,
                                                         Engine.IndexingOperation operation) throws Throwable {
    Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
    if (update != null) {
        validateMapping(update.root().iterator());
        mappingUpdatedAction.updateMappingOnMasterSynchronously(request.shardId().getIndex(), request.type(), update);
        // Re-prepare the operation now that the mapping has been updated on the master.
        operation = prepareIndexOnPrimary(indexShard, version, request, item);
        if (operation.parsedDoc().dynamicMappingsUpdate() != null) {
            throw new RetryOnPrimaryException(request.shardId(),
                "Dynamic mappings are not available on the node that holds the primary yet");
        }
    }
    return operation;
}
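validateMapping consumes the child mappers of the update's root object. A minimal sketch of that iteration, with an illustrative (not Crate's actual) validation rule:

// Illustrative only: walk the root-level mappers of a dynamic mapping update,
// as handed to validateMapping(update.root().iterator()) above.
Iterator<Mapper> newMappers = update.root().iterator();
while (newMappers.hasNext()) {
    Mapper mapper = newMappers.next();
    if (mapper.name().startsWith("_")) { // hypothetical rule: reject underscore-prefixed columns
        throw new IllegalArgumentException("invalid column name: " + mapper.name());
    }
}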
Use of org.elasticsearch.index.mapper.Mapping in project crate by crate.
Class ArrayMapperTest, method testObjectArrayMappingNewColumn.
@Test
public void testObjectArrayMappingNewColumn() throws Exception {
    // @formatter:off
    String mapping = Strings.toString(XContentFactory.jsonBuilder()
        .startObject().startObject(TYPE).startObject("properties")
            .startObject("array_field")
                .field("type", ArrayMapper.CONTENT_TYPE)
                .startObject(ArrayMapper.INNER_TYPE)
                    .field("type", "object")
                    .field("dynamic", true)
                    .startObject("properties")
                        .startObject("s").field("type", "keyword").endObject()
                    .endObject()
                .endObject()
            .endObject()
        .endObject().endObject().endObject());
    // @formatter:on
    DocumentMapper mapper = mapper(INDEX, mapping);
    // child object mapper
    assertThat(mapper.objectMappers().get("array_field"), is(instanceOf(ObjectArrayMapper.class)));
    BytesReference bytesReference = BytesReference.bytes(XContentFactory.jsonBuilder()
        .startObject()
            .startArray("array_field")
                .startObject().field("s", "a").field("new", true).endObject()
            .endArray()
        .endObject());
    SourceToParse sourceToParse = new SourceToParse(INDEX, "abc", bytesReference, XContentType.JSON);
    ParsedDocument doc = mapper.parse(sourceToParse);
    Mapping mappingUpdate = doc.dynamicMappingsUpdate();
    assertThat(mappingUpdate, notNullValue());
    mapper = mapper.merge(mappingUpdate);
    assertThat(doc.docs().size(), is(1));
    String[] values = doc.docs().get(0).getValues("array_field.new");
    assertThat(values, arrayContainingInAnyOrder(is("T"), is("1")));
    String mappingSourceString = new CompressedXContent(mapper, XContentType.JSON, ToXContent.EMPTY_PARAMS).string();
    assertThat(mappingSourceString, is("{\"default\":{" + "\"properties\":{" + "\"array_field\":{" + "\"type\":\"array\"," + "\"inner\":{" + "\"dynamic\":\"true\"," + "\"properties\":{" + "\"new\":{\"type\":\"boolean\"}," + "\"s\":{" + "\"type\":\"keyword\"" + "}" + "}" + "}" + "}" + "}}}"));
}
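As a hedged follow-up (not part of the original test): once the dynamic update has been merged into the DocumentMapper, re-parsing the same source should no longer produce a mapping update.

// Follow-up sketch: after the merge, the same document parses without a further dynamic update.
ParsedDocument reparsed = mapper.parse(sourceToParse);
assertThat(reparsed.dynamicMappingsUpdate(), nullValue());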