
Example 66 with ParsedDocument

use of org.elasticsearch.index.mapper.ParsedDocument in project crate by crate.

From class InternalEngineTests, method testShouldPeriodicallyFlush.

@Test
public void testShouldPeriodicallyFlush() throws Exception {
    assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
    // A new engine may have more than one empty translog file - the test should account for this extra size.
    final Translog translog = engine.getTranslog();
    final IntSupplier uncommittedTranslogOperationsSinceLastCommit = () -> {
        long localCheckpoint = Long.parseLong(engine.getLastCommittedSegmentInfos().userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
        return translog.totalOperationsByMinGen(translog.getMinGenerationForSeqNo(localCheckpoint + 1).translogFileGeneration);
    };
    final long extraTranslogSizeInNewEngine = engine.getTranslog().stats().getUncommittedSizeInBytes() - Translog.DEFAULT_HEADER_SIZE_IN_BYTES;
    int numDocs = between(10, 100);
    for (int id = 0; id < numDocs; id++) {
        final ParsedDocument doc = testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
        engine.index(indexForDoc(doc));
    }
    assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
    long flushThreshold = RandomNumbers.randomLongBetween(random(), 120, engine.getTranslog().stats().getUncommittedSizeInBytes() - extraTranslogSizeInNewEngine);
    final IndexSettings indexSettings = engine.config().getIndexSettings();
    final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), flushThreshold + "b")).build();
    indexSettings.updateIndexMetadata(indexMetadata);
    engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(), indexSettings.getSoftDeleteRetentionOperations());
    assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(numDocs));
    assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
    engine.flush();
    assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
    // Stale operations are skipped by Lucene but still added to the translog - the engine should still be able to flush
    for (int id = 0; id < numDocs; id++) {
        final ParsedDocument doc = testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
        final Engine.IndexResult result = engine.index(replicaIndexForDoc(doc, 1L, id, false));
        assertThat(result.isCreated(), equalTo(false));
    }
    SegmentInfos lastCommitInfo = engine.getLastCommittedSegmentInfos();
    assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(numDocs));
    assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
    engine.flush(false, false);
    assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
    assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
    // If the new index commit still points to the same translog generation as the current index commit,
    // we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes.
    // create a gap here
    generateNewSeqNo(engine);
    for (int id = 0; id < numDocs; id++) {
        if (randomBoolean()) {
            translog.rollGeneration();
        }
        final ParsedDocument doc = testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null);
        engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false));
        if (engine.shouldPeriodicallyFlush()) {
            engine.flush();
            assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
            assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
        }
    }
}
Also used : SegmentInfos(org.apache.lucene.index.SegmentInfos) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) IntSupplier(java.util.function.IntSupplier) IndexSettings(org.elasticsearch.index.IndexSettings) IndexMetadata(org.elasticsearch.cluster.metadata.IndexMetadata) LongPoint(org.apache.lucene.document.LongPoint) TestTranslog(org.elasticsearch.index.translog.TestTranslog) Translog(org.elasticsearch.index.translog.Translog) Test(org.junit.Test)
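
Not part of the indexed source - a minimal sketch of the knob this test tightens, assuming the standard setting key behind IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING; a tiny value makes shouldPeriodicallyFlush() report true after only a handful of operations, which is what the test simulates by rewriting the index metadata.

import org.elasticsearch.common.settings.Settings;

public class FlushThresholdSketch {
    public static void main(String[] args) {
        // Assumed setting key ("index.translog.flush_threshold_size"); the test
        // achieves the same effect by rebuilding IndexMetadata with the setting applied.
        Settings indexSettings = Settings.builder()
            .put("index.translog.flush_threshold_size", "256b")
            .build();
        System.out.println(indexSettings.get("index.translog.flush_threshold_size"));
    }
}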

Example 67 with ParsedDocument

use of org.elasticsearch.index.mapper.ParsedDocument in project crate by crate.

From class InternalEngineTests, method testTranslogReplay.

@Test
public void testTranslogReplay() throws IOException {
    final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getProcessedLocalCheckpoint();
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
        Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
        Engine.IndexResult indexResult = engine.index(firstIndexRequest);
        assertThat(indexResult.getVersion(), equalTo(1L));
    }
    assertVisibleCount(engine, numDocs);
    translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
    engine.close();
    // we need to reuse the engine config, otherwise parser.mappingModified won't work
    engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier));
    engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
    engine.refresh("warm_up");
    assertVisibleCount(engine, numDocs, false);
    engine.close();
    translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
    engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
    engine.refresh("warm_up");
    assertVisibleCount(engine, numDocs, false);
    final boolean flush = randomBoolean();
    int randomId = randomIntBetween(numDocs + 1, numDocs + 10);
    ParsedDocument doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
    Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
    Engine.IndexResult indexResult = engine.index(firstIndexRequest);
    assertThat(indexResult.getVersion(), equalTo(1L));
    if (flush) {
        engine.flush();
        engine.refresh("test");
    }
    doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
    Engine.Index idxRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
    Engine.IndexResult result = engine.index(idxRequest);
    engine.refresh("test");
    assertThat(result.getVersion(), equalTo(2L));
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
        assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
    }
    engine.close();
    translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
    engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
    engine.refresh("warm_up");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
        assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
    }
    engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), UNASSIGNED_SEQ_NO, primaryTerm.get(), Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), UNASSIGNED_SEQ_NO, 0));
    if (randomBoolean()) {
        engine.close();
        engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
    }
    engine.refresh("test");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs);
        assertThat(topDocs.totalHits.value, equalTo((long) numDocs));
    }
}
Also used : Searcher(org.elasticsearch.index.engine.Engine.Searcher) BytesArray(org.elasticsearch.common.bytes.BytesArray) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) LongPoint(org.apache.lucene.document.LongPoint) TopDocs(org.apache.lucene.search.TopDocs) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) LongSupplier(java.util.function.LongSupplier) Test(org.junit.Test)
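
The reopen-and-replay pattern repeated above can be summarized in a short sketch; `config` and `translogHandler` are assumed to come from the surrounding test fixture (EngineTestCase) and are not constructed here.

import java.io.IOException;

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.InternalEngine;

public class TranslogReplaySketch {
    // Reopen an engine on the same store and replay whatever is still in the translog.
    static Engine reopenAndReplay(EngineConfig config, Engine.TranslogRecoveryRunner translogHandler) throws IOException {
        InternalEngine reopened = new InternalEngine(config);
        reopened.recoverFromTranslog(translogHandler, Long.MAX_VALUE); // replay all pending operations
        reopened.refresh("warm_up");                                   // make replayed docs searchable
        return reopened;
    }
}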

Example 68 with ParsedDocument

use of org.elasticsearch.index.mapper.ParsedDocument in project crate by crate.

From class InternalEngineTests, method testTranslogMultipleOperationsSameDocument.

@Test
public void testTranslogMultipleOperationsSameDocument() throws IOException {
    final int ops = randomIntBetween(1, 32);
    Engine initialEngine;
    final List<Engine.Operation> operations = new ArrayList<>();
    try {
        initialEngine = engine;
        for (int i = 0; i < ops; i++) {
            final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
            if (randomBoolean()) {
                final Engine.Index operation = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
                operations.add(operation);
                initialEngine.index(operation);
            } else {
                final Engine.Delete operation = new Engine.Delete("1", newUid(doc), UNASSIGNED_SEQ_NO, 0, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), UNASSIGNED_SEQ_NO, 0);
                operations.add(operation);
                initialEngine.delete(operation);
            }
        }
    } finally {
        IOUtils.close(engine);
    }
    try (Engine recoveringEngine = new InternalEngine(engine.config())) {
        recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
        recoveringEngine.refresh("test");
        try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new MatchAllDocsQuery(), collector);
            assertThat(collector.getTotalHits(), equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1));
        }
    }
}
Also used : Searcher(org.elasticsearch.index.engine.Engine.Searcher) ArrayList(java.util.ArrayList) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) LongPoint(org.apache.lucene.document.LongPoint) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) TotalHitCountCollector(org.apache.lucene.search.TotalHitCountCollector) Test(org.junit.Test)
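
The final assertion boils down to a last-operation-wins rule; a small self-contained illustration of that expectation (plain Java, hypothetical op labels, not taken from the test):

import java.util.List;

public class LastOpWinsSketch {
    // Mirrors the assertion above: after recovery, the document is visible
    // if and only if the last operation recorded for it was an index, not a delete.
    static int expectedHits(List<String> opsForDoc) {
        return "DELETE".equals(opsForDoc.get(opsForDoc.size() - 1)) ? 0 : 1;
    }

    public static void main(String[] args) {
        System.out.println(expectedHits(List.of("INDEX", "DELETE", "INDEX"))); // 1
        System.out.println(expectedHits(List.of("INDEX", "DELETE")));          // 0
    }
}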

Example 69 with ParsedDocument

use of org.elasticsearch.index.mapper.ParsedDocument in project crate by crate.

From class InternalEngineTests, method testConcurrentOutOfOrderDocsOnReplica.

@Test
public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, InterruptedException {
    final List<Engine.Operation> opsDoc1 = generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "1");
    final Engine.Operation lastOpDoc1 = opsDoc1.get(opsDoc1.size() - 1);
    final String lastFieldValueDoc1;
    if (lastOpDoc1 instanceof Engine.Index) {
        Engine.Index index = (Engine.Index) lastOpDoc1;
        lastFieldValueDoc1 = index.docs().get(0).get("value");
    } else {
        // delete
        lastFieldValueDoc1 = null;
    }
    final List<Engine.Operation> opsDoc2 = generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "2");
    final Engine.Operation lastOpDoc2 = opsDoc2.get(opsDoc2.size() - 1);
    final String lastFieldValueDoc2;
    if (lastOpDoc2 instanceof Engine.Index) {
        Engine.Index index = (Engine.Index) lastOpDoc2;
        lastFieldValueDoc2 = index.docs().get(0).get("value");
    } else {
        // delete
        lastFieldValueDoc2 = null;
    }
    // randomly interleave
    final AtomicLong seqNoGenerator = new AtomicLong();
    BiFunction<Engine.Operation, Long, Engine.Operation> seqNoUpdater = (operation, newSeqNo) -> {
        if (operation instanceof Engine.Index) {
            Engine.Index index = (Engine.Index) operation;
            Document doc = testDocumentWithTextField(index.docs().get(0).get("value"));
            ParsedDocument parsedDocument = testParsedDocument(index.id(), index.routing(), doc, index.source(), null);
            return new Engine.Index(index.uid(), parsedDocument, newSeqNo, index.primaryTerm(), index.version(), index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(), UNASSIGNED_SEQ_NO, 0);
        } else {
            Engine.Delete delete = (Engine.Delete) operation;
            return new Engine.Delete(delete.id(), delete.uid(), newSeqNo, delete.primaryTerm(), delete.version(), delete.versionType(), delete.origin(), delete.startTime(), UNASSIGNED_SEQ_NO, 0);
        }
    };
    final List<Engine.Operation> allOps = new ArrayList<>();
    Iterator<Engine.Operation> iter1 = opsDoc1.iterator();
    Iterator<Engine.Operation> iter2 = opsDoc2.iterator();
    while (iter1.hasNext() && iter2.hasNext()) {
        final Engine.Operation next = randomBoolean() ? iter1.next() : iter2.next();
        allOps.add(seqNoUpdater.apply(next, seqNoGenerator.getAndIncrement()));
    }
    iter1.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
    iter2.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
    // insert some duplicates
    randomSubsetOf(allOps).forEach(op -> allOps.add(seqNoUpdater.apply(op, op.seqNo())));
    shuffle(allOps, random());
    concurrentlyApplyOps(allOps, engine);
    engine.refresh("test");
    if (lastFieldValueDoc1 != null) {
        try (Searcher searcher = engine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(new Term("value", lastFieldValueDoc1)), collector);
            assertThat(collector.getTotalHits(), equalTo(1));
        }
    }
    if (lastFieldValueDoc2 != null) {
        try (Searcher searcher = engine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(new Term("value", lastFieldValueDoc2)), collector);
            assertThat(collector.getTotalHits(), equalTo(1));
        }
    }
    int totalExpectedOps = 0;
    if (lastFieldValueDoc1 != null) {
        totalExpectedOps++;
    }
    if (lastFieldValueDoc2 != null) {
        totalExpectedOps++;
    }
    assertVisibleCount(engine, totalExpectedOps);
}
Also used : ShardId(org.elasticsearch.index.shard.ShardId) NoMergePolicy(org.apache.lucene.index.NoMergePolicy) Arrays(java.util.Arrays) Versions(org.elasticsearch.common.lucene.uid.Versions) LongSupplier(java.util.function.LongSupplier) PEER_RECOVERY(org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY) BigArrays(org.elasticsearch.common.util.BigArrays) IndexSettingsModule(org.elasticsearch.test.IndexSettingsModule) IndexMetadata(org.elasticsearch.cluster.metadata.IndexMetadata) Matchers.not(org.hamcrest.Matchers.not) Term(org.apache.lucene.index.Term) Level(org.apache.logging.log4j.Level) AlreadyClosedException(org.apache.lucene.store.AlreadyClosedException) ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) LogEvent(org.apache.logging.log4j.core.LogEvent) ReferenceManager(org.apache.lucene.search.ReferenceManager) ParseContext(org.elasticsearch.index.mapper.ParseContext) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) SeqNoStats(org.elasticsearch.index.seqno.SeqNoStats) LOCAL_TRANSLOG_RECOVERY(org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY) RandomNumbers(com.carrotsearch.randomizedtesting.generators.RandomNumbers) MergePolicy(org.apache.lucene.index.MergePolicy) TermsEnum(org.apache.lucene.index.TermsEnum) Matchers.nullValue(org.hamcrest.Matchers.nullValue) Map(java.util.Map) TestTranslog(org.elasticsearch.index.translog.TestTranslog) CheckedRunnable(org.elasticsearch.common.CheckedRunnable) FieldsVisitor(org.elasticsearch.index.fieldvisitor.FieldsVisitor) Path(java.nio.file.Path) REPLICA(org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA) NumericDocValuesField(org.apache.lucene.document.NumericDocValuesField) Matchers.notNullValue(org.hamcrest.Matchers.notNullValue) SoftDeletesRetentionMergePolicy(org.apache.lucene.index.SoftDeletesRetentionMergePolicy) UUIDs(org.elasticsearch.common.UUIDs) Set(java.util.Set) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) SnapshotMatchers(org.elasticsearch.index.translog.SnapshotMatchers) UncheckedIOException(java.io.UncheckedIOException) PointValues(org.apache.lucene.index.PointValues) CountDownLatch(java.util.concurrent.CountDownLatch) SeqNoFieldMapper(org.elasticsearch.index.mapper.SeqNoFieldMapper) AbstractRunnable(org.elasticsearch.common.util.concurrent.AbstractRunnable) Logger(org.apache.logging.log4j.Logger) Matchers.contains(org.hamcrest.Matchers.contains) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) ReplicationTracker(org.elasticsearch.index.seqno.ReplicationTracker) Matchers.containsString(org.hamcrest.Matchers.containsString) TestShardRouting(org.elasticsearch.cluster.routing.TestShardRouting) IndexCommit(org.apache.lucene.index.IndexCommit) Tuple(io.crate.common.collections.Tuple) LiveIndexWriterConfig(org.apache.lucene.index.LiveIndexWriterConfig) LogDocMergePolicy(org.apache.lucene.index.LogDocMergePolicy) FixedBitSet(org.apache.lucene.util.FixedBitSet) RegexFilter(org.apache.logging.log4j.core.filter.RegexFilter) CodecService(org.elasticsearch.index.codec.CodecService) Mockito.spy(org.mockito.Mockito.spy) ShardRoutingState(org.elasticsearch.cluster.routing.ShardRoutingState) Supplier(java.util.function.Supplier) CheckedBiConsumer(org.elasticsearch.common.CheckedBiConsumer) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) ToLongBiFunction(java.util.function.ToLongBiFunction) BytesArray(org.elasticsearch.common.bytes.BytesArray) 
RetentionLease(org.elasticsearch.index.seqno.RetentionLease) RetentionLeases(org.elasticsearch.index.seqno.RetentionLeases) Lock(org.apache.lucene.store.Lock) Store(org.elasticsearch.index.store.Store) Matchers.hasSize(org.hamcrest.Matchers.hasSize) Matchers.isIn(org.hamcrest.Matchers.isIn) Bits(org.apache.lucene.util.Bits) TranslogConfig(org.elasticsearch.index.translog.TranslogConfig) TieredMergePolicy(org.apache.lucene.index.TieredMergePolicy) Loggers(org.elasticsearch.common.logging.Loggers) TopDocs(org.apache.lucene.search.TopDocs) Matchers.greaterThanOrEqualTo(org.hamcrest.Matchers.greaterThanOrEqualTo) LongStream(java.util.stream.LongStream) SequenceNumbers(org.elasticsearch.index.seqno.SequenceNumbers) Files(java.nio.file.Files) IdFieldMapper(org.elasticsearch.index.mapper.IdFieldMapper) AbstractAppender(org.apache.logging.log4j.core.appender.AbstractAppender) DocIdAndSeqNo(org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo) IOUtils(io.crate.common.io.IOUtils) SequentialStoredFieldsLeafReader(org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader) IOException(java.io.IOException) BrokenBarrierException(java.util.concurrent.BrokenBarrierException) Test(org.junit.Test) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) AtomicLong(java.util.concurrent.atomic.AtomicLong) SourceFieldMapper(org.elasticsearch.index.mapper.SourceFieldMapper) VersionFieldMapper(org.elasticsearch.index.mapper.VersionFieldMapper) Matchers.hasItem(org.hamcrest.Matchers.hasItem) Matchers.sameInstance(org.hamcrest.Matchers.sameInstance) Phaser(java.util.concurrent.Phaser) Matcher(org.hamcrest.Matcher) TextField(org.apache.lucene.document.TextField) TranslogDeletionPolicies.createTranslogDeletionPolicy(org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy) Lucene87StoredFieldsFormat(org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat) ActionListener(org.elasticsearch.action.ActionListener) Randomness(org.elasticsearch.common.Randomness) Collections.shuffle(java.util.Collections.shuffle) CoreMatchers.is(org.hamcrest.CoreMatchers.is) ElasticsearchException(org.elasticsearch.ElasticsearchException) BiFunction(java.util.function.BiFunction) IndexableField(org.apache.lucene.index.IndexableField) ConcurrentCollections(org.elasticsearch.common.util.concurrent.ConcurrentCollections) StoredField(org.apache.lucene.document.StoredField) Matchers.hasKey(org.hamcrest.Matchers.hasKey) VersionType(org.elasticsearch.index.VersionType) Settings(org.elasticsearch.common.settings.Settings) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Directory(org.apache.lucene.store.Directory) ThreadPool(org.elasticsearch.threadpool.ThreadPool) LeafReaderContext(org.apache.lucene.index.LeafReaderContext) ShardUtils(org.elasticsearch.index.shard.ShardUtils) TotalHitCountCollector(org.apache.lucene.search.TotalHitCountCollector) ByteSizeValue(org.elasticsearch.common.unit.ByteSizeValue) CyclicBarrier(java.util.concurrent.CyclicBarrier) Terms(org.apache.lucene.index.Terms) Matchers.lessThanOrEqualTo(org.hamcrest.Matchers.lessThanOrEqualTo) BytesRef(org.apache.lucene.util.BytesRef) DirectoryReader(org.apache.lucene.index.DirectoryReader) UNASSIGNED_PRIMARY_TERM(org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM) UNASSIGNED_SEQ_NO(org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO) IndexShardRoutingTable(org.elasticsearch.cluster.routing.IndexShardRoutingTable) 
BytesReference(org.elasticsearch.common.bytes.BytesReference) Collectors(java.util.stream.Collectors) SegmentInfos(org.apache.lucene.index.SegmentInfos) Searcher(org.elasticsearch.index.engine.Engine.Searcher) MapperService(org.elasticsearch.index.mapper.MapperService) Base64(java.util.Base64) List(java.util.List) IndexWriter(org.apache.lucene.index.IndexWriter) Version(org.elasticsearch.Version) MatcherAssert(org.hamcrest.MatcherAssert) Matchers.containsInAnyOrder(org.hamcrest.Matchers.containsInAnyOrder) TriFunction(org.elasticsearch.common.TriFunction) Matchers.equalTo(org.hamcrest.Matchers.equalTo) LeafReader(org.apache.lucene.index.LeafReader) TimeValue(io.crate.common.unit.TimeValue) Queue(java.util.Queue) IndexWriterConfig(org.apache.lucene.index.IndexWriterConfig) LogByteSizeMergePolicy(org.apache.lucene.index.LogByteSizeMergePolicy) MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) IndexReader(org.apache.lucene.index.IndexReader) IndexSearcher(org.apache.lucene.search.IndexSearcher) ShardRouting(org.elasticsearch.cluster.routing.ShardRouting) LongPoint(org.apache.lucene.document.LongPoint) NumericDocValues(org.apache.lucene.index.NumericDocValues) PRIMARY(org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Document(org.elasticsearch.index.mapper.ParseContext.Document) HashMap(java.util.HashMap) VersionsAndSeqNoResolver(org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver) Lucene(org.elasticsearch.common.lucene.Lucene) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) TransportActions(org.elasticsearch.action.support.TransportActions) Strings(org.elasticsearch.common.Strings) HashSet(java.util.HashSet) LOCAL_RESET(org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_RESET) Charset(java.nio.charset.Charset) NoneCircuitBreakerService(org.elasticsearch.indices.breaker.NoneCircuitBreakerService) IndexSettings(org.elasticsearch.index.IndexSettings) LocalCheckpointTracker(org.elasticsearch.index.seqno.LocalCheckpointTracker) IntSupplier(java.util.function.IntSupplier) Matchers.empty(org.hamcrest.Matchers.empty) Iterator(java.util.Iterator) Uid(org.elasticsearch.index.mapper.Uid) Matchers(org.hamcrest.Matchers) Mockito.when(org.mockito.Mockito.when) VersionUtils(org.elasticsearch.test.VersionUtils) TimeUnit(java.util.concurrent.TimeUnit) TermQuery(org.apache.lucene.search.TermQuery) NO_OPS_PERFORMED(org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED) Field(org.apache.lucene.document.Field) Closeable(java.io.Closeable) Translog(org.elasticsearch.index.translog.Translog) Comparator(java.util.Comparator) Collections(java.util.Collections) LogManager(org.apache.logging.log4j.LogManager) TermQuery(org.apache.lucene.search.TermQuery) Searcher(org.elasticsearch.index.engine.Engine.Searcher) IndexSearcher(org.apache.lucene.search.IndexSearcher) ArrayList(java.util.ArrayList) Matchers.containsString(org.hamcrest.Matchers.containsString) Term(org.apache.lucene.index.Term) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) Document(org.elasticsearch.index.mapper.ParseContext.Document) LongPoint(org.apache.lucene.document.LongPoint) AtomicLong(java.util.concurrent.atomic.AtomicLong) ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) AtomicLong(java.util.concurrent.atomic.AtomicLong) TotalHitCountCollector(org.apache.lucene.search.TotalHitCountCollector) Test(org.junit.Test)
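
A self-contained sketch (plain Java, hypothetical element type, not from the test) of the interleaving step used above to merge the two per-document histories into one operation stream:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;

public class InterleaveSketch {
    // Mirrors the merge loop in the test: randomly interleave two operation
    // histories into one stream, then drain whichever iterator still has elements.
    static <T> List<T> interleave(List<T> a, List<T> b, Random random) {
        List<T> out = new ArrayList<>();
        Iterator<T> i1 = a.iterator();
        Iterator<T> i2 = b.iterator();
        while (i1.hasNext() && i2.hasNext()) {
            out.add(random.nextBoolean() ? i1.next() : i2.next());
        }
        i1.forEachRemaining(out::add);
        i2.forEachRemaining(out::add);
        return out;
    }

    public static void main(String[] args) {
        System.out.println(interleave(List.of("a1", "a2"), List.of("b1", "b2", "b3"), new Random(42)));
    }
}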

Example 70 with ParsedDocument

use of org.elasticsearch.index.mapper.ParsedDocument in project crate by crate.

From class InternalEngineTests, method testIndexWriterInfoStream.

// #5891: make sure IndexWriter's infoStream output is
// sent to lucene.iw with log level TRACE:
@Test
public void testIndexWriterInfoStream() throws IllegalAccessException, IOException {
    assumeFalse("who tests the tester?", VERBOSE);
    MockAppender mockAppender = new MockAppender("testIndexWriterInfoStream");
    mockAppender.start();
    Logger rootLogger = LogManager.getRootLogger();
    Level savedLevel = rootLogger.getLevel();
    Loggers.addAppender(rootLogger, mockAppender);
    Loggers.setLevel(rootLogger, Level.DEBUG);
    rootLogger = LogManager.getRootLogger();
    try {
        // First, with DEBUG, which should NOT log IndexWriter output:
        ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
        engine.index(indexForDoc(doc));
        engine.flush();
        assertFalse(mockAppender.sawIndexWriterMessage);
        // Again, with TRACE, which should log IndexWriter output:
        Loggers.setLevel(rootLogger, Level.TRACE);
        engine.index(indexForDoc(doc));
        engine.flush();
        assertTrue(mockAppender.sawIndexWriterMessage);
    } finally {
        Loggers.removeAppender(rootLogger, mockAppender);
        mockAppender.stop();
        Loggers.setLevel(rootLogger, savedLevel);
    }
}
Also used : ParsedDocument(org.elasticsearch.index.mapper.ParsedDocument) Level(org.apache.logging.log4j.Level) Logger(org.apache.logging.log4j.Logger) Test(org.junit.Test)
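
A minimal sketch of the logging knob this test exercises, assuming (per the test's comment) that IndexWriter's infoStream output is forwarded to the "lucene.iw" logger only when that logger is enabled for TRACE; the test toggles the level on the root logger instead.

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;

public class InfoStreamLoggingSketch {
    public static void main(String[] args) {
        // Assumed logger name from the test's comment: at DEBUG and above the
        // infoStream stays silent; TRACE routes it into the normal log.
        Logger indexWriterLogger = LogManager.getLogger("lucene.iw");
        Loggers.setLevel(indexWriterLogger, Level.TRACE);
        System.out.println(indexWriterLogger.getLevel()); // TRACE
    }
}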

Aggregations

ParsedDocument (org.elasticsearch.index.mapper.ParsedDocument) 211
Test (org.junit.Test) 85
LongPoint (org.apache.lucene.document.LongPoint) 59
BytesArray (org.elasticsearch.common.bytes.BytesArray) 58
Matchers.containsString (org.hamcrest.Matchers.containsString) 57
Store (org.elasticsearch.index.store.Store) 52
Searcher (org.elasticsearch.index.engine.Engine.Searcher) 46
DocumentMapper (org.elasticsearch.index.mapper.DocumentMapper) 35
IOException (java.io.IOException) 32
AtomicLong (java.util.concurrent.atomic.AtomicLong) 31
MatchAllDocsQuery (org.apache.lucene.search.MatchAllDocsQuery) 31
IndexableField (org.apache.lucene.index.IndexableField) 30
Term (org.apache.lucene.index.Term) 28
TopDocs (org.apache.lucene.search.TopDocs) 28
NumericDocValuesField (org.apache.lucene.document.NumericDocValuesField) 27
Index (org.elasticsearch.index.Index) 27
UncheckedIOException (java.io.UncheckedIOException) 26
Field (org.apache.lucene.document.Field) 26
TextField (org.apache.lucene.document.TextField) 26
ArrayList (java.util.ArrayList) 25