Usage example of org.elasticsearch.index.mapper.SourceToParse from the Crate project.
From class ArrayMapperTest, method testObjectArrayMappingNewColumn:
@Test
public void testObjectArrayMappingNewColumn() throws Exception {
    // Index mapping: "array_field" is an array whose inner type is a dynamic
    // object that already declares a keyword sub-field "s".
    // @formatter: off
    String mapping = Strings.toString(XContentFactory.jsonBuilder()
        .startObject()
        .startObject(TYPE)
        .startObject("properties")
        .startObject("array_field")
        .field("type", ArrayMapper.CONTENT_TYPE)
        .startObject(ArrayMapper.INNER_TYPE)
        .field("type", "object")
        .field("dynamic", true)
        .startObject("properties")
        .startObject("s")
        .field("type", "keyword")
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject());
    DocumentMapper docMapper = mapper(INDEX, mapping);
    // The declared child object mapper must be wrapped as an object-array mapper.
    assertThat(docMapper.objectMappers().get("array_field"), is(instanceOf(ObjectArrayMapper.class)));

    // Parse a document that introduces a previously unmapped boolean field "new".
    BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder()
        .startObject()
        .startArray("array_field")
        .startObject()
        .field("s", "a")
        .field("new", true)
        .endObject()
        .endArray()
        .endObject());
    ParsedDocument doc = docMapper.parse(new SourceToParse(INDEX, "abc", source, XContentType.JSON));

    // Parsing must have produced a dynamic mapping update for the new column.
    Mapping mappingUpdate = doc.dynamicMappingsUpdate();
    assertThat(mappingUpdate, notNullValue());
    docMapper = docMapper.merge(mappingUpdate);

    assertThat(doc.docs().size(), is(1));
    String[] values = doc.docs().get(0).getValues("array_field.new");
    assertThat(values, arrayContainingInAnyOrder(is("T"), is("1")));

    // After the merge the serialized mapping must contain the dynamically
    // added "new" field alongside the declared "s" field.
    String mappingSourceString = new CompressedXContent(docMapper, XContentType.JSON, ToXContent.EMPTY_PARAMS).string();
    assertThat(mappingSourceString,
               is("{\"default\":{" +
                  "\"properties\":{" +
                  "\"array_field\":{" +
                  "\"type\":\"array\"," +
                  "\"inner\":{" +
                  "\"dynamic\":\"true\"," +
                  "\"properties\":{" +
                  "\"new\":{\"type\":\"boolean\"}," +
                  "\"s\":{" +
                  "\"type\":\"keyword\"" +
                  "}" +
                  "}" +
                  "}" +
                  "}" +
                  "}}}"));
}
Usage example of org.elasticsearch.index.mapper.SourceToParse from the Crate project.
From class ArrayMapperTest, method testParseDynamicEmptyArray:
@Test
public void testParseDynamicEmptyArray() throws Exception {
    // Start from a mapping that declares no properties at all.
    String mapping = Strings.toString(XContentFactory.jsonBuilder()
        .startObject()
        .startObject(TYPE)
        .startObject("properties")
        .endObject()
        .endObject()
        .endObject());
    DocumentMapper docMapper = mapper(INDEX, mapping);

    // Parse a document whose only content is an empty array.
    BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder()
        .startObject()
        .array("new_array_field")
        .endObject());
    ParsedDocument doc = docMapper.parse(new SourceToParse(INDEX, "abc", source, XContentType.JSON));

    // An empty array carries no values and no type information, so neither a
    // Lucene field nor a dynamic mapping entry should be created for it.
    assertThat(doc.docs().get(0).getField("new_array_field"), is(nullValue()));
    assertThat(docMapper.mappers().getMapper("new_array_field"), is(nullValue()));
}
Usage example of org.elasticsearch.index.mapper.SourceToParse from the Crate project.
From class TranslogHandler, method convertToEngineOp:
/**
 * Translates a translog entry into the equivalent engine operation so it can
 * be replayed against the engine (e.g. during recovery).
 *
 * @param operation the translog entry to convert
 * @param origin    origin to attribute to the replayed operation
 * @return the engine operation mirroring the translog entry
 * @throws IllegalStateException if the translog entry has an unknown op type
 */
private Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.Operation.Origin origin) {
    switch (operation.opType()) {
        case INDEX: {
            final Translog.Index indexOp = (Translog.Index) operation;
            final String indexName = mapperService.index().getName();
            // The content type is not stored in the translog; sniff it from the source bytes.
            final SourceToParse source = new SourceToParse(
                indexName,
                indexOp.id(),
                indexOp.source(),
                XContentHelper.xContentType(indexOp.source()),
                indexOp.routing());
            return IndexShard.prepareIndex(
                docMapper(Constants.DEFAULT_MAPPING_TYPE),
                source,
                indexOp.seqNo(),
                indexOp.primaryTerm(),
                indexOp.version(),
                null,
                origin,
                indexOp.getAutoGeneratedIdTimestamp(),
                true,
                SequenceNumbers.UNASSIGNED_SEQ_NO,
                0);
        }
        case DELETE: {
            final Translog.Delete deleteOp = (Translog.Delete) operation;
            return new Engine.Delete(
                deleteOp.id(),
                deleteOp.uid(),
                deleteOp.seqNo(),
                deleteOp.primaryTerm(),
                deleteOp.version(),
                null,
                origin,
                System.nanoTime(),
                SequenceNumbers.UNASSIGNED_SEQ_NO,
                0);
        }
        case NO_OP: {
            final Translog.NoOp noOp = (Translog.NoOp) operation;
            return new Engine.NoOp(noOp.seqNo(), noOp.primaryTerm(), origin, System.nanoTime(), noOp.reason());
        }
        default:
            throw new IllegalStateException("No operation defined for [" + operation + "]");
    }
}
Usage example of org.elasticsearch.index.mapper.SourceToParse from the Crate project.
From class IndexShardTestCase, method indexDoc:
/**
 * Indexes a single document on the given shard, taking the primary or replica
 * code path depending on the shard's routing entry. On the primary a required
 * dynamic mapping update is applied once and the operation retried; on a
 * replica a required mapping update is an error, because replicas never
 * generate mappings themselves.
 *
 * @throws IOException if parsing or indexing the source fails
 */
protected Engine.IndexResult indexDoc(IndexShard shard, String id, String source, XContentType xContentType, String routing) throws IOException {
    SourceToParse parseable = new SourceToParse(
        shard.shardId().getIndexName(), id, new BytesArray(source), xContentType, routing);
    Engine.IndexResult indexResult;
    if (shard.routingEntry().primary()) {
        indexResult = shard.applyIndexOperationOnPrimary(
            Versions.MATCH_ANY, VersionType.INTERNAL, parseable,
            SequenceNumbers.UNASSIGNED_SEQ_NO, 0, UNSET_AUTO_GENERATED_TIMESTAMP, false);
        if (indexResult.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) {
            // Apply the dynamic mapping update the engine asked for, then retry once.
            updateMappings(shard, IndexMetadata.builder(shard.indexSettings().getIndexMetadata())
                .putMapping("default", indexResult.getRequiredMappingUpdate().toString())
                .build());
            indexResult = shard.applyIndexOperationOnPrimary(
                Versions.MATCH_ANY, VersionType.INTERNAL, parseable,
                SequenceNumbers.UNASSIGNED_SEQ_NO, 0, UNSET_AUTO_GENERATED_TIMESTAMP, false);
        }
        // advance local checkpoint
        shard.sync();
        shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getLocalCheckpoint());
    } else {
        // Replica path: the primary would normally assign the sequence number,
        // so fake the next one here and manually replicate max_seq_no_of_updates.
        final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1;
        shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo);
        indexResult = shard.applyIndexOperationOnReplica(
            seqNo, shard.getOperationPrimaryTerm(), 0, UNSET_AUTO_GENERATED_TIMESTAMP, false, parseable);
        // advance local checkpoint
        shard.sync();
        if (indexResult.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) {
            throw new TransportReplicationAction.RetryOnReplicaException(
                shard.shardId,
                "Mappings are not available on the replica yet, triggered update: " + indexResult.getRequiredMappingUpdate());
        }
    }
    return indexResult;
}
Usage example of org.elasticsearch.index.mapper.SourceToParse from the Crate project.
From class TransportShardUpsertAction, method shardIndexOperationOnReplica:
/**
 * Applies a single upsert item on a replica shard, dispatching to the index
 * or create replica code path depending on the item's operation type.
 * Exceptions that may be safely ignored on a replica are swallowed; anything
 * else is rethrown so the shard gets failed.
 */
private void shardIndexOperationOnReplica(ShardUpsertRequest request, ShardUpsertRequest.Item item, IndexShard indexShard) {
    SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, item.source())
        .type(request.type())
        .id(item.id())
        .routing(request.routing());
    try {
        if (item.opType() == IndexRequest.OpType.INDEX) {
            if (logger.isTraceEnabled()) {
                logger.trace("[{} (R)] Updating document with id {}, source: {}", indexShard.shardId(), item.id(), item.source().toUtf8());
            }
            indexShard.index(
                indexShard.prepareIndexOnReplica(sourceToParse, item.version(), item.versionType(), request.canHaveDuplicates()));
        } else {
            if (logger.isTraceEnabled()) {
                logger.trace("[{} (R)] Creating document with id {}, source: {}", indexShard.shardId(), item.id(), item.source().toUtf8());
            }
            indexShard.create(
                indexShard.prepareCreateOnReplica(sourceToParse, item.version(), item.versionType(), request.canHaveDuplicates(), false));
        }
    } catch (Throwable t) {
        // so we will fail the shard
        if (!ignoreReplicaException(t)) {
            throw t;
        }
    }
}
Aggregations