Use of org.elasticsearch.index.mapper.SourceToParse in project crate by crate.
From the class PrimaryReplicaSyncerTests, method testSyncerOnClosingShard.
public void testSyncerOnClosingShard() throws Exception {
    IndexShard shard = newStartedShard(true);
    AtomicBoolean syncActionCalled = new AtomicBoolean();
    PrimaryReplicaSyncer.SyncAction syncAction = (request, allocationId, primaryTerm, listener) -> {
        logger.info("Sending off {} operations", request.getOperations().length);
        syncActionCalled.set(true);
        threadPool.generic().execute(() -> listener.onResponse(new ReplicationResponse()));
    };
    PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(syncAction);
    // every document is sent off separately
    syncer.setChunkSize(new ByteSizeValue(1));
    int numDocs = 10;
    for (int i = 0; i < numDocs; i++) {
        // Index the doc, but do not advance the local checkpoint.
        shard.applyIndexOperationOnPrimary(
            Versions.MATCH_ANY,
            VersionType.INTERNAL,
            new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
            SequenceNumbers.UNASSIGNED_SEQ_NO,
            0,
            -1L,
            false);
    }
    String allocationId = shard.routingEntry().allocationId().getId();
    shard.updateShardState(
        shard.routingEntry(),
        shard.getPendingPrimaryTerm(),
        null,
        1000L,
        Collections.singleton(allocationId),
        new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build());
    CountDownLatch syncCalledLatch = new CountDownLatch(1);
    PlainActionFuture<PrimaryReplicaSyncer.ResyncTask> fut = new PlainActionFuture<PrimaryReplicaSyncer.ResyncTask>() {

        @Override
        public void onFailure(Exception e) {
            try {
                super.onFailure(e);
            } finally {
                syncCalledLatch.countDown();
            }
        }

        @Override
        public void onResponse(PrimaryReplicaSyncer.ResyncTask result) {
            try {
                super.onResponse(result);
            } finally {
                syncCalledLatch.countDown();
            }
        }
    };
    threadPool.generic().execute(() -> {
        syncer.resync(shard, fut);
    });
    if (randomBoolean()) {
        syncCalledLatch.await();
    }
    closeShards(shard);
    try {
        fut.actionGet();
        assertTrue("Sync action was not called", syncActionCalled.get());
    } catch (AlreadyClosedException | IndexShardClosedException ignored) {
        // ignore
    }
}
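For reference, the per-document indexing call from the loop above, pulled out into a standalone sketch. The helper name indexEmptyDoc is hypothetical, the same imports as the test are assumed, and the argument comments follow the upstream Elasticsearch signature of applyIndexOperationOnPrimary, so treat them as assumptions for the crate fork; the call itself, its argument order, and the four-argument SourceToParse constructor are copied from the test.

private static void indexEmptyDoc(IndexShard shard, String id) throws IOException {
    // Mirrors the loop body in testSyncerOnClosingShard: index a document on the primary
    // without advancing the local checkpoint. Argument roles are assumed from the upstream
    // signature (version, versionType, sourceToParse, ifSeqNo, ifPrimaryTerm, autoGeneratedTimestamp, isRetry).
    shard.applyIndexOperationOnPrimary(
        Versions.MATCH_ANY,                 // no version check
        VersionType.INTERNAL,
        new SourceToParse(
            shard.shardId().getIndexName(), // index name
            id,                             // document id
            new BytesArray("{}"),           // empty JSON document
            XContentType.JSON),             // content type of the source bytes
        SequenceNumbers.UNASSIGNED_SEQ_NO,  // no seq# precondition
        0,                                  // no primary-term precondition
        -1L,                                // no auto-generated-id timestamp
        false);                             // not a retry
}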
Use of org.elasticsearch.index.mapper.SourceToParse in project crate by crate.
From the class IndexShard, method applyTranslogOperation.
private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation operation, Engine.Operation.Origin origin) throws IOException {
    // If a translog op is replayed on the primary (e.g. ccr), we need to use external instead of null for its version type.
    final VersionType versionType = (origin == Engine.Operation.Origin.PRIMARY) ? VersionType.EXTERNAL : null;
    final Engine.Result result;
    switch (operation.opType()) {
        case INDEX:
            final Translog.Index index = (Translog.Index) operation;
            // We set canHaveDuplicates to true all the time such that we de-optimize the translog case and ensure that all
            // autoGeneratedID docs that are coming from the primary are updated correctly.
            result = applyIndexOperation(
                engine,
                index.seqNo(),
                index.primaryTerm(),
                index.version(),
                versionType,
                UNASSIGNED_SEQ_NO,
                0,
                index.getAutoGeneratedIdTimestamp(),
                true,
                origin,
                new SourceToParse(shardId.getIndexName(), index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()));
            break;
        case DELETE:
            final Translog.Delete delete = (Translog.Delete) operation;
            result = applyDeleteOperation(engine, delete.seqNo(), delete.primaryTerm(), delete.version(), delete.id(), versionType, UNASSIGNED_SEQ_NO, 0, origin);
            break;
        case NO_OP:
            final Translog.NoOp noOp = (Translog.NoOp) operation;
            result = markSeqNoAsNoop(engine, noOp.seqNo(), noOp.primaryTerm(), noOp.reason(), origin);
            break;
        default:
            throw new IllegalStateException("No operation defined for [" + operation + "]");
    }
    return result;
}
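A minimal sketch that isolates the SourceToParse construction from the INDEX branch above. The helper name sourceFromTranslogIndex and the explicit indexName parameter are hypothetical stand-ins for shardId.getIndexName(); the Translog.Index accessors, the XContentHelper.xContentType sniffing, and the five-argument constructor mirror the branch as written.

private static SourceToParse sourceFromTranslogIndex(String indexName, Translog.Index index) {
    // Rebuild the SourceToParse exactly as the INDEX branch above does: id, raw source bytes,
    // a content type sniffed from those bytes, and the original routing value.
    return new SourceToParse(
        indexName,
        index.id(),
        index.source(),
        XContentHelper.xContentType(index.source()),
        index.routing());
}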
Use of org.elasticsearch.index.mapper.SourceToParse in project crate by crate.
From the class ArrayMapperTest, method testParseNull.
@Test
public void testParseNull() throws Exception {
    // @formatter:off
    String mapping = Strings.toString(XContentFactory.jsonBuilder()
        .startObject().startObject(TYPE).startObject("properties")
            .startObject("array_field")
                .field("type", ArrayMapper.CONTENT_TYPE)
                .startObject(ArrayMapper.INNER_TYPE).field("type", "double").endObject()
            .endObject()
        .endObject().endObject().endObject());
    // @formatter:on
    DocumentMapper mapper = mapper(INDEX, mapping);
    BytesReference bytesReference = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().nullField("array_field").endObject());
    SourceToParse sourceToParse = new SourceToParse(INDEX, "abc", bytesReference, XContentType.JSON);
    ParsedDocument parsedDoc = mapper.parse(sourceToParse);
    assertThat(parsedDoc.docs().size(), is(1));
    assertThat(parsedDoc.docs().get(0).getField("array_field"), is(nullValue()));
}
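The same document source can be built from a raw JSON string instead of an XContent builder, much as the PrimaryReplicaSyncer test above does with new BytesArray("{}"). A hedged sketch; only the construction style changes, and the JSON literal matches the builder output of this test.

// Equivalent to the builder-produced source above: {"array_field": null}.
// BytesArray is a BytesReference backed by the UTF-8 bytes of the string.
BytesReference bytesReference = new BytesArray("{\"array_field\": null}");
SourceToParse sourceToParse = new SourceToParse(INDEX, "abc", bytesReference, XContentType.JSON);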
Use of org.elasticsearch.index.mapper.SourceToParse in project crate by crate.
From the class ArrayMapperTest, method testParseDynamicNullArray.
@Test
public void testParseDynamicNullArray() throws Exception {
    String mapping = Strings.toString(XContentFactory.jsonBuilder()
        .startObject().startObject(TYPE).startObject("properties").endObject().endObject().endObject());
    DocumentMapper mapper = mapper(INDEX, mapping);
    // parse source with null array
    BytesReference bytesReference = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().startArray("new_array_field").nullValue().endArray().endObject());
    SourceToParse sourceToParse = new SourceToParse(INDEX, "abc", bytesReference, XContentType.JSON);
    ParsedDocument doc = mapper.parse(sourceToParse);
    assertThat(doc.docs().get(0).getField("new_array_field"), is(nullValue()));
    assertThat(mapper.mappers().getMapper("new_array_field"), is(nullValue()));
}
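Both ArrayMapper tests end with the same two steps: wrap the JSON bytes in a SourceToParse and hand them to DocumentMapper.parse. A hedged sketch of that shared pattern; parseJsonDoc and its parameters are hypothetical, while the four-argument SourceToParse constructor and the parse call match the tests above.

private static ParsedDocument parseJsonDoc(DocumentMapper mapper, String index, String id, BytesReference source) {
    // Wrap the raw JSON bytes in a SourceToParse and let the DocumentMapper turn them
    // into a ParsedDocument, exactly as both tests above do.
    SourceToParse sourceToParse = new SourceToParse(index, id, source, XContentType.JSON);
    return mapper.parse(sourceToParse);
}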