Search in sources:

Example 1 with MockAnalyzer

Use of org.apache.lucene.tests.analysis.MockAnalyzer in the OpenSearch project by opensearch-project.

From the class SliceBuilderTests, method testToFilterWithRouting.

public void testToFilterWithRouting() throws IOException {
    // Create an empty, committed index so a reader can be opened below.
    Directory directory = new ByteBuffersDirectory();
    try (IndexWriter indexWriter = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())))) {
        indexWriter.commit();
    }
    // Stub the cluster so shard resolution always yields a single shard iterator.
    OperationRouting operationRouting = mock(OperationRouting.class);
    GroupShardsIterator<ShardIterator> shardIterators = new GroupShardsIterator<>(
        Collections.singletonList(new PlainShardIterator(new ShardId("index", "index", 1), Collections.emptyList()))
    );
    when(operationRouting.searchShards(any(), any(), any(), any())).thenReturn(shardIterators);
    ClusterState clusterState = mock(ClusterState.class);
    when(clusterState.metadata()).thenReturn(Metadata.EMPTY_METADATA);
    ClusterService clusterService = mock(ClusterService.class);
    when(clusterService.state()).thenReturn(clusterState);
    when(clusterService.operationRouting()).thenReturn(operationRouting);
    when(clusterService.getSettings()).thenReturn(Settings.EMPTY);
    try (IndexReader reader = DirectoryReader.open(directory)) {
        Version version = VersionUtils.randomCompatibleVersion(random(), Version.CURRENT);
        QueryShardContext shardContext = createShardContext(version, reader, "field", DocValuesType.SORTED, 5, 0);
        SliceBuilder sliceBuilder = new SliceBuilder("field", 6, 10);
        // Slicing with an explicit routing value resolves to a doc-values slice query.
        Query filter = sliceBuilder.toFilter(clusterService, createRequest(1, new String[] { "foo" }, null), shardContext, version);
        assertEquals(new DocValuesSliceQuery("field", 6, 10), filter);
        // Resolving the routing via an alias filter key produces the same query.
        filter = sliceBuilder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), shardContext, version);
        assertEquals(new DocValuesSliceQuery("field", 6, 10), filter);
    }
}
Also used : ClusterState(org.opensearch.cluster.ClusterState) Query(org.apache.lucene.search.Query) MatchNoDocsQuery(org.apache.lucene.search.MatchNoDocsQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) PlainShardIterator(org.opensearch.cluster.routing.PlainShardIterator) Matchers.containsString(org.hamcrest.Matchers.containsString) ShardId(org.opensearch.index.shard.ShardId) ClusterService(org.opensearch.cluster.service.ClusterService) MockAnalyzer(org.apache.lucene.tests.analysis.MockAnalyzer) GroupShardsIterator(org.opensearch.cluster.routing.GroupShardsIterator) IndexWriter(org.apache.lucene.index.IndexWriter) Version(org.opensearch.Version) ByteBuffersDirectory(org.apache.lucene.store.ByteBuffersDirectory) OperationRouting(org.opensearch.cluster.routing.OperationRouting) PlainShardIterator(org.opensearch.cluster.routing.PlainShardIterator) ShardIterator(org.opensearch.cluster.routing.ShardIterator) IndexReader(org.apache.lucene.index.IndexReader) QueryShardContext(org.opensearch.index.query.QueryShardContext) Directory(org.apache.lucene.store.Directory) ByteBuffersDirectory(org.apache.lucene.store.ByteBuffersDirectory)

Example 2 with MockAnalyzer

Use of org.apache.lucene.tests.analysis.MockAnalyzer in the OpenSearch project by opensearch-project.

From the class SliceBuilderTests, method testInvalidField.

public void testInvalidField() throws IOException {
    // An empty commit is enough to open a reader over the directory.
    Directory directory = new ByteBuffersDirectory();
    try (IndexWriter indexWriter = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())))) {
        indexWriter.commit();
    }
    try (IndexReader reader = DirectoryReader.open(directory)) {
        // A null DocValuesType means the slice field cannot supply numeric doc values,
        // so building the slice filter must be rejected.
        QueryShardContext shardContext = createShardContext(Version.CURRENT, reader, "field", null, 1, 0);
        SliceBuilder sliceBuilder = new SliceBuilder("field", 5, 10);
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> sliceBuilder.toFilter(null, createRequest(0), shardContext, Version.CURRENT)
        );
        assertThat(exception.getMessage(), containsString("cannot load numeric doc values"));
    }
}
Also used : MockAnalyzer(org.apache.lucene.tests.analysis.MockAnalyzer) IndexWriter(org.apache.lucene.index.IndexWriter) ByteBuffersDirectory(org.apache.lucene.store.ByteBuffersDirectory) IndexReader(org.apache.lucene.index.IndexReader) QueryShardContext(org.opensearch.index.query.QueryShardContext) Directory(org.apache.lucene.store.Directory) ByteBuffersDirectory(org.apache.lucene.store.ByteBuffersDirectory)

Example 3 with MockAnalyzer

Use of org.apache.lucene.tests.analysis.MockAnalyzer in the OpenSearch project by opensearch-project.

From the class StoreTests, method testNewChecksums.

/**
 * Verifies that every file written with the default codec carries a valid checksum
 * that matches what the store metadata reports, and that no metadata is readable
 * before the first commit.
 */
public void testNewChecksums() throws IOException {
    final ShardId shardId = new ShardId("index", "_na_", 1);
    Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId));
    // set default codec - all segments need checksums
    IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec()));
    int docs = 1 + random().nextInt(100);
    for (int i = 0; i < docs; i++) {
        Document doc = new Document();
        doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new SortedDocValuesField("dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
        writer.addDocument(doc);
    }
    // randomly update a subset of the documents so deletes/updates are also covered
    if (random().nextBoolean()) {
        for (int i = 0; i < docs; i++) {
            if (random().nextBoolean()) {
                Document doc = new Document();
                doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                writer.updateDocument(new Term("id", "" + i), doc);
            }
        }
    }
    if (random().nextBoolean()) {
        // flush
        DirectoryReader.open(writer).close();
    }
    // nothing committed yet, so there is no snapshot to read metadata from
    expectThrows(IndexNotFoundException.class, store::getMetadata);
    writer.commit();
    writer.close();
    Store.MetadataSnapshot metadata = store.getMetadata();
    assertThat(metadata.asMap().isEmpty(), is(false));
    for (StoreFileMetadata meta : metadata) {
        // every file's recorded checksum must match the footer checksum Lucene wrote
        try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) {
            String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input));
            assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum));
            assertThat(meta.writtenBy(), equalTo(Version.LATEST));
            // segment info and commit files additionally carry a content hash
            if (meta.name().endsWith(".si") || meta.name().startsWith("segments_")) {
                assertThat(meta.hash().length, greaterThan(0));
            }
        }
    }
    assertConsistent(store, metadata);
    TestUtil.checkIndex(store.directory());
    assertDeleteContent(store, store.directory());
    IOUtils.close(store);
}
Also used : Term(org.apache.lucene.index.Term) Matchers.containsString(org.hamcrest.Matchers.containsString) Document(org.apache.lucene.document.Document) ShardId(org.opensearch.index.shard.ShardId) MockAnalyzer(org.apache.lucene.tests.analysis.MockAnalyzer) IndexWriter(org.apache.lucene.index.IndexWriter) SortedDocValuesField(org.apache.lucene.document.SortedDocValuesField) TextField(org.apache.lucene.document.TextField) IndexNotFoundException(org.apache.lucene.index.IndexNotFoundException) ChecksumIndexInput(org.apache.lucene.store.ChecksumIndexInput) IndexInput(org.apache.lucene.store.IndexInput) DummyShardLock(org.opensearch.test.DummyShardLock) BytesRef(org.apache.lucene.util.BytesRef)

Example 4 with MockAnalyzer

Use of org.apache.lucene.tests.analysis.MockAnalyzer in the OpenSearch project by opensearch-project.

From the class StoreTests, method testUserDataRead.

public void testUserDataRead() throws IOException {
    // User data attached to a commit must be readable back through the store metadata.
    final ShardId shardId = new ShardId("index", "_na_", 1);
    Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId));
    IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec());
    // Keep the commit pinned so metadata can optionally be read from an explicit commit point.
    SnapshotDeletionPolicy deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    config.setIndexDeletionPolicy(deletionPolicy);
    IndexWriter writer = new IndexWriter(store.directory(), config);
    Document document = new Document();
    document.add(new TextField("id", "1", Field.Store.NO));
    writer.addDocument(document);
    final String syncId = "a sync id";
    Map<String, String> userData = new HashMap<>(2);
    userData.put(Engine.SYNC_COMMIT_ID, syncId);
    writer.setLiveCommitData(userData.entrySet());
    writer.commit();
    writer.close();
    // Read metadata either from the live directory or from the pinned commit.
    Store.MetadataSnapshot metadata = store.getMetadata(randomBoolean() ? null : deletionPolicy.snapshot());
    assertFalse(metadata.asMap().isEmpty());
    // do not check for correct files, we have enough tests for that above
    assertThat(metadata.getCommitUserData().get(Engine.SYNC_COMMIT_ID), equalTo(syncId));
    TestUtil.checkIndex(store.directory());
    assertDeleteContent(store, store.directory());
    IOUtils.close(store);
}
Also used : KeepOnlyLastCommitDeletionPolicy(org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy) HashMap(java.util.HashMap) Matchers.containsString(org.hamcrest.Matchers.containsString) Document(org.apache.lucene.document.Document) SnapshotDeletionPolicy(org.apache.lucene.index.SnapshotDeletionPolicy) ShardId(org.opensearch.index.shard.ShardId) MockAnalyzer(org.apache.lucene.tests.analysis.MockAnalyzer) IndexWriter(org.apache.lucene.index.IndexWriter) TextField(org.apache.lucene.document.TextField) DummyShardLock(org.opensearch.test.DummyShardLock) IndexWriterConfig(org.apache.lucene.index.IndexWriterConfig)

Example 5 with MockAnalyzer

Use of org.apache.lucene.tests.analysis.MockAnalyzer in the OpenSearch project by opensearch-project.

From the class StoreTests, method testCleanupFromSnapshot.

/**
 * Builds an index with at least two commits, then verifies that
 * {@code Store#cleanupAndVerify} keeps exactly the files of the chosen
 * metadata snapshot (plus the write lock) and removes files that belong
 * only to the other commit.
 */
public void testCleanupFromSnapshot() throws IOException {
    final ShardId shardId = new ShardId("index", "_na_", 1);
    Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId));
    // this time random codec....
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec());
    // we keep all commits and that allows us clean based on multiple snapshots
    indexWriterConfig.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
    IndexWriter writer = new IndexWriter(store.directory(), indexWriterConfig);
    int docs = 1 + random().nextInt(100);
    int numCommits = 0;
    for (int i = 0; i < docs; i++) {
        if (i > 0 && randomIntBetween(0, 10) == 0) {
            writer.commit();
            numCommits++;
        }
        Document doc = new Document();
        doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new SortedDocValuesField("dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
        writer.addDocument(doc);
    }
    // guarantee at least one intermediate commit so firstMeta and secondMeta differ
    if (numCommits < 1) {
        writer.commit();
        Document doc = new Document();
        doc.add(new TextField("id", "" + docs++, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new SortedDocValuesField("dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
        writer.addDocument(doc);
    }
    Store.MetadataSnapshot firstMeta = store.getMetadata();
    // randomly update a subset of documents so the commits diverge further
    if (random().nextBoolean()) {
        for (int i = 0; i < docs; i++) {
            if (random().nextBoolean()) {
                Document doc = new Document();
                doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                writer.updateDocument(new Term("id", "" + i), doc);
            }
        }
    }
    writer.commit();
    writer.close();
    Store.MetadataSnapshot secondMeta = store.getMetadata();
    // cleaning up against either snapshot must keep only that snapshot's files
    if (randomBoolean()) {
        assertCleanupKeptOnly(store, firstMeta, secondMeta);
    } else {
        assertCleanupKeptOnly(store, secondMeta, firstMeta);
    }
    deleteContent(store.directory());
    IOUtils.close(store);
}

/**
 * Runs {@code cleanupAndVerify} against {@code kept} and asserts that every
 * surviving file (ignoring "extra" files and the write lock) belongs to
 * {@code kept}, and that at least one file unique to {@code other} was removed.
 */
private static void assertCleanupKeptOnly(Store store, Store.MetadataSnapshot kept, Store.MetadataSnapshot other) throws IOException {
    store.cleanupAndVerify("test", kept);
    int numNotFound = 0;
    for (String file : store.directory().listAll()) {
        if (file.startsWith("extra")) {
            continue;
        }
        assertTrue(file, kept.contains(file) || file.equals("write.lock"));
        if (other.contains(file) == false) {
            numNotFound++;
        }
    }
    assertTrue("at least one file must not be in here since we have two commits?", numNotFound > 0);
}
Also used : Term(org.apache.lucene.index.Term) Matchers.containsString(org.hamcrest.Matchers.containsString) Document(org.apache.lucene.document.Document) ShardId(org.opensearch.index.shard.ShardId) MockAnalyzer(org.apache.lucene.tests.analysis.MockAnalyzer) IndexWriter(org.apache.lucene.index.IndexWriter) SortedDocValuesField(org.apache.lucene.document.SortedDocValuesField) TextField(org.apache.lucene.document.TextField) DummyShardLock(org.opensearch.test.DummyShardLock) BytesRef(org.apache.lucene.util.BytesRef) IndexWriterConfig(org.apache.lucene.index.IndexWriterConfig)

Aggregations

MockAnalyzer (org.apache.lucene.tests.analysis.MockAnalyzer)18 IndexWriter (org.apache.lucene.index.IndexWriter)15 Directory (org.apache.lucene.store.Directory)10 Document (org.apache.lucene.document.Document)9 Term (org.apache.lucene.index.Term)9 TextField (org.apache.lucene.document.TextField)8 IndexReader (org.apache.lucene.index.IndexReader)8 Query (org.apache.lucene.search.Query)7 IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig)5 ByteBuffersDirectory (org.apache.lucene.store.ByteBuffersDirectory)5 Matchers.containsString (org.hamcrest.Matchers.containsString)5 QueryShardContext (org.opensearch.index.query.QueryShardContext)5 SortedDocValuesField (org.apache.lucene.document.SortedDocValuesField)4 IndexSearcher (org.apache.lucene.search.IndexSearcher)4 BytesRef (org.apache.lucene.util.BytesRef)4 ShardId (org.opensearch.index.shard.ShardId)4 ArrayList (java.util.ArrayList)3 DummyShardLock (org.opensearch.test.DummyShardLock)3 HashMap (java.util.HashMap)2 HashSet (java.util.HashSet)2