Search in sources :

Example 1 with MockVariableLengthPayloadFilter

Use of org.apache.lucene.analysis.MockVariableLengthPayloadFilter in the project lucene-solr by Apache.

The class TestIndexWriterOnVMError, method doTest.

// just one thread, serial merge policy, hopefully debuggable
/**
 * Stress test: indexes documents carrying many field types while {@code failOn}
 * randomly injects VirtualMachineErrors, then checks that IndexWriter treats the
 * error as a tragedy and that the index stays consistent. Runs single-threaded
 * with a serial merge policy so failures are reproducible and debuggable.
 * On each injected disaster the whole iteration restarts (label STARTOVER).
 *
 * @param failOn failure to install on the mock directory; decides when the fake
 *               VirtualMachineError is thrown
 * @throws Exception only for unexpected (non-injected) failures
 */
private void doTest(MockDirectoryWrapper.Failure failOn) throws Exception {
    // log all exceptions we hit, in case we fail (for debugging)
    ByteArrayOutputStream exceptionLog = new ByteArrayOutputStream();
    PrintStream exceptionStream = new PrintStream(exceptionLog, true, "UTF-8");
    //PrintStream exceptionStream = System.out;
    // fixed seed so the payload filter produces the same payloads across analyzer reuse
    final long analyzerSeed = random().nextLong();
    final Analyzer analyzer = new Analyzer() {

        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
            // we are gonna make it angry
            tokenizer.setEnableChecks(false);
            TokenStream stream = tokenizer;
            // emit some payloads
            if (fieldName.contains("payloads")) {
                stream = new MockVariableLengthPayloadFilter(new Random(analyzerSeed), stream);
            }
            return new TokenStreamComponents(tokenizer, stream);
        }
    };
    MockDirectoryWrapper dir = null;
    final int numIterations = TEST_NIGHTLY ? atLeast(100) : atLeast(5);
    STARTOVER: for (int iter = 0; iter < numIterations; iter++) {
        try {
            // close from last run
            if (dir != null) {
                dir.close();
            }
            // disable slow things: we don't rely upon sleeps here.
            dir = newMockDirectory();
            dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
            dir.setUseSlowOpenClosers(false);
            IndexWriterConfig conf = newIndexWriterConfig(analyzer);
            // just for now, try to keep this test reproducible
            conf.setMergeScheduler(new SerialMergeScheduler());
            // test never makes it this far...
            int numDocs = atLeast(2000);
            IndexWriter iw = new IndexWriter(dir, conf);
            // ensure there is always a commit
            iw.commit();
            // arm the injected failure only after the initial commit succeeded
            dir.failOn(failOn);
            for (int i = 0; i < numDocs; i++) {
                // one doc with every doc-values flavor, points, stored fields,
                // payloads and term vectors, so failures can hit any codec path
                Document doc = new Document();
                doc.add(newStringField("id", Integer.toString(i), Field.Store.NO));
                doc.add(new NumericDocValuesField("dv", i));
                doc.add(new BinaryDocValuesField("dv2", new BytesRef(Integer.toString(i))));
                doc.add(new SortedDocValuesField("dv3", new BytesRef(Integer.toString(i))));
                doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i))));
                doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i - 1))));
                doc.add(new SortedNumericDocValuesField("dv5", i));
                doc.add(new SortedNumericDocValuesField("dv5", i - 1));
                doc.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
                // ensure we store something
                doc.add(new StoredField("stored1", "foo"));
                doc.add(new StoredField("stored1", "bar"));
                // ensure we get some payloads
                doc.add(newTextField("text_payloads", TestUtil.randomAnalysisString(random(), 6, true), Field.Store.NO));
                // ensure we get some vectors
                FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
                ft.setStoreTermVectors(true);
                doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
                doc.add(new IntPoint("point", random().nextInt()));
                doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
                if (random().nextInt(10) > 0) {
                    // single doc
                    try {
                        iw.addDocument(doc);
                        // we made it, sometimes delete our doc, or update a dv
                        int thingToDo = random().nextInt(4);
                        if (thingToDo == 0) {
                            iw.deleteDocuments(new Term("id", Integer.toString(i)));
                        } else if (thingToDo == 1) {
                            iw.updateNumericDocValue(new Term("id", Integer.toString(i)), "dv", i + 1L);
                        } else if (thingToDo == 2) {
                            iw.updateBinaryDocValue(new Term("id", Integer.toString(i)), "dv2", new BytesRef(Integer.toString(i + 1)));
                        }
                    } catch (VirtualMachineError | AlreadyClosedException disaster) {
                        // expected injected disaster: validate the writer's tragedy state, restart
                        getTragedy(disaster, iw, exceptionStream);
                        continue STARTOVER;
                    }
                } else {
                    // block docs
                    Document doc2 = new Document();
                    doc2.add(newStringField("id", Integer.toString(-i), Field.Store.NO));
                    doc2.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
                    doc2.add(new StoredField("stored1", "foo"));
                    doc2.add(new StoredField("stored1", "bar"));
                    doc2.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
                    try {
                        iw.addDocuments(Arrays.asList(doc, doc2));
                        // we made it, sometimes delete our docs
                        if (random().nextBoolean()) {
                            iw.deleteDocuments(new Term("id", Integer.toString(i)), new Term("id", Integer.toString(-i)));
                        }
                    } catch (VirtualMachineError | AlreadyClosedException disaster) {
                        getTragedy(disaster, iw, exceptionStream);
                        continue STARTOVER;
                    }
                }
                if (random().nextInt(10) == 0) {
                    // trigger flush:
                    try {
                        if (random().nextBoolean()) {
                            // open an NRT reader (flushes) and sanity-check it
                            DirectoryReader ir = null;
                            try {
                                ir = DirectoryReader.open(iw, random().nextBoolean(), false);
                                TestUtil.checkReader(ir);
                            } finally {
                                IOUtils.closeWhileHandlingException(ir);
                            }
                        } else {
                            iw.commit();
                        }
                        if (DirectoryReader.indexExists(dir)) {
                            TestUtil.checkIndex(dir);
                        }
                    } catch (VirtualMachineError | AlreadyClosedException disaster) {
                        getTragedy(disaster, iw, exceptionStream);
                        continue STARTOVER;
                    }
                }
            }
            try {
                iw.close();
            } catch (VirtualMachineError | AlreadyClosedException disaster) {
                getTragedy(disaster, iw, exceptionStream);
                continue STARTOVER;
            }
        } catch (Throwable t) {
            // anything not caught above is a real test failure: dump the fake-exception
            // log collected so far, then rethrow
            System.out.println("Unexpected exception: dumping fake-exception-log:...");
            exceptionStream.flush();
            System.out.println(exceptionLog.toString("UTF-8"));
            System.out.flush();
            Rethrow.rethrow(t);
        }
    }
    dir.close();
    if (VERBOSE) {
        System.out.println("TEST PASSED: dumping fake-exception-log:...");
        System.out.println(exceptionLog.toString("UTF-8"));
    }
}
Also used : TokenStream(org.apache.lucene.analysis.TokenStream) Analyzer(org.apache.lucene.analysis.Analyzer) Document(org.apache.lucene.document.Document) StoredField(org.apache.lucene.document.StoredField) Random(java.util.Random) SortedNumericDocValuesField(org.apache.lucene.document.SortedNumericDocValuesField) NumericDocValuesField(org.apache.lucene.document.NumericDocValuesField) SortedDocValuesField(org.apache.lucene.document.SortedDocValuesField) BytesRef(org.apache.lucene.util.BytesRef) MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) PrintStream(java.io.PrintStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BinaryDocValuesField(org.apache.lucene.document.BinaryDocValuesField) IntPoint(org.apache.lucene.document.IntPoint) FieldType(org.apache.lucene.document.FieldType) MockTokenizer(org.apache.lucene.analysis.MockTokenizer) IntPoint(org.apache.lucene.document.IntPoint) SortedNumericDocValuesField(org.apache.lucene.document.SortedNumericDocValuesField) SortedSetDocValuesField(org.apache.lucene.document.SortedSetDocValuesField) MockVariableLengthPayloadFilter(org.apache.lucene.analysis.MockVariableLengthPayloadFilter)

Example 2 with MockVariableLengthPayloadFilter

Use of org.apache.lucene.analysis.MockVariableLengthPayloadFilter in the project lucene-solr by Apache.

The class TestBlockPostingsFormat3, method test.

// creates 8 fields with different options and does "duels" of fields against each other
/**
 * Indexes identical text into eight fields that differ only in index options,
 * term-vector settings, and payload filters, then "duels" the fields against
 * each other via {@code verify} — once as-built and once after a forceMerge.
 */
public void test() throws Exception {
    Directory dir = newDirectory();
    // per-field analysis: fixed-length payloads, variable-length payloads, or plain tokens
    Analyzer analyzer = new Analyzer(Analyzer.PER_FIELD_REUSE_STRATEGY) {

        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            Tokenizer source = new MockTokenizer();
            if (fieldName.contains("payloadsFixed")) {
                return new TokenStreamComponents(source, new MockFixedLengthPayloadFilter(new Random(0), source, 1));
            }
            if (fieldName.contains("payloadsVariable")) {
                return new TokenStreamComponents(source, new MockVariableLengthPayloadFilter(new Random(0), source));
            }
            return new TokenStreamComponents(source);
        }
    };
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene50PostingsFormat()));
    // TODO we could actually add more fields implemented with different PFs
    // or, just put this test into the usual rotation?
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

    FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn this on for a cross-check
    docsOnlyType.setStoreTermVectors(true);
    docsOnlyType.setIndexOptions(IndexOptions.DOCS);

    FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn this on for a cross-check
    docsAndFreqsType.setStoreTermVectors(true);
    docsAndFreqsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS);

    FieldType positionsType = new FieldType(TextField.TYPE_NOT_STORED);
    // turn these on for a cross-check
    positionsType.setStoreTermVectors(true);
    positionsType.setStoreTermVectorPositions(true);
    positionsType.setStoreTermVectorOffsets(true);
    positionsType.setStoreTermVectorPayloads(true);

    FieldType offsetsType = new FieldType(positionsType);
    offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

    // the eight duelling fields: same text every iteration, different options
    Field[] fields = {
        new Field("field1docs", "", docsOnlyType),
        new Field("field2freqs", "", docsAndFreqsType),
        new Field("field3positions", "", positionsType),
        new Field("field4offsets", "", offsetsType),
        new Field("field5payloadsFixed", "", positionsType),
        new Field("field6payloadsVariable", "", positionsType),
        new Field("field7payloadsFixedOffsets", "", offsetsType),
        new Field("field8payloadsVariableOffsets", "", offsetsType)
    };
    Document doc = new Document();
    for (Field field : fields) {
        doc.add(field);
    }
    for (int i = 0; i < MAXDOC; i++) {
        // mix of a unique term, a very common term, spelled-out numbers, and random noise
        String text = Integer.toString(i) + " verycommon " + English.intToEnglish(i).replace('-', ' ') + " " + TestUtil.randomSimpleString(random());
        for (Field field : fields) {
            field.setStringValue(text);
        }
        iw.addDocument(doc);
    }
    iw.close();
    verify(dir);
    // for some extra coverage, checkIndex before we forceMerge
    TestUtil.checkIndex(dir);

    // reopen in APPEND mode, squash everything into one segment, and duel again
    iwc = newIndexWriterConfig(analyzer);
    iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene50PostingsFormat()));
    iwc.setOpenMode(OpenMode.APPEND);
    IndexWriter merger = new IndexWriter(dir, iwc);
    merger.forceMerge(1);
    merger.close();
    verify(dir);
    dir.close();
}
Also used : Analyzer(org.apache.lucene.analysis.Analyzer) Document(org.apache.lucene.document.Document) FieldType(org.apache.lucene.document.FieldType) MockTokenizer(org.apache.lucene.analysis.MockTokenizer) Field(org.apache.lucene.document.Field) TextField(org.apache.lucene.document.TextField) Random(java.util.Random) IndexWriter(org.apache.lucene.index.IndexWriter) RandomIndexWriter(org.apache.lucene.index.RandomIndexWriter) MockFixedLengthPayloadFilter(org.apache.lucene.analysis.MockFixedLengthPayloadFilter) Tokenizer(org.apache.lucene.analysis.Tokenizer) MockTokenizer(org.apache.lucene.analysis.MockTokenizer) RandomIndexWriter(org.apache.lucene.index.RandomIndexWriter) Directory(org.apache.lucene.store.Directory) TokenFilter(org.apache.lucene.analysis.TokenFilter) MockVariableLengthPayloadFilter(org.apache.lucene.analysis.MockVariableLengthPayloadFilter) IndexWriterConfig(org.apache.lucene.index.IndexWriterConfig)

Example 3 with MockVariableLengthPayloadFilter

Use of org.apache.lucene.analysis.MockVariableLengthPayloadFilter in the project lucene-solr by Apache.

The class TestIndexWriterExceptions2, method testBasics.

// just one thread, serial merge policy, hopefully debuggable
/**
 * Stress test: indexes documents through a "cranky" analyzer and codec that
 * randomly throw fake exceptions, verifying that IndexWriter either survives
 * non-aborting exceptions or aborts cleanly (surfacing AlreadyClosedException)
 * and can be reopened. Only exceptions whose message starts with
 * "Fake IOException" are expected; anything else fails the test.
 * Single thread + serial merge policy keep failures debuggable.
 *
 * @throws Exception only for unexpected (non-fake) failures
 */
public void testBasics() throws Exception {
    // disable slow things: we don't rely upon sleeps here.
    Directory dir = newDirectory();
    if (dir instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
        ((MockDirectoryWrapper) dir).setUseSlowOpenClosers(false);
    }
    // log all exceptions we hit, in case we fail (for debugging)
    ByteArrayOutputStream exceptionLog = new ByteArrayOutputStream();
    PrintStream exceptionStream = new PrintStream(exceptionLog, true, "UTF-8");
    //PrintStream exceptionStream = System.out;
    // create lots of non-aborting exceptions with a broken analyzer
    final long analyzerSeed = random().nextLong();
    Analyzer analyzer = new Analyzer() {

        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, false);
            // TODO: can we turn this on? our filter is probably too evil
            tokenizer.setEnableChecks(false);
            TokenStream stream = tokenizer;
            // emit some payloads
            if (fieldName.contains("payloads")) {
                stream = new MockVariableLengthPayloadFilter(new Random(analyzerSeed), stream);
            }
            stream = new CrankyTokenFilter(stream, new Random(analyzerSeed));
            return new TokenStreamComponents(tokenizer, stream);
        }
    };
    // create lots of aborting exceptions with a broken codec
    // we don't need a random codec, as we aren't trying to find bugs in the codec here.
    Codec inner = RANDOM_MULTIPLIER > 1 ? Codec.getDefault() : new AssertingCodec();
    Codec codec = new CrankyCodec(inner, new Random(random().nextLong()));
    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
    // just for now, try to keep this test reproducible
    conf.setMergeScheduler(new SerialMergeScheduler());
    conf.setCodec(codec);
    int numDocs = atLeast(500);
    IndexWriter iw = new IndexWriter(dir, conf);
    try {
        // only legal to see an AlreadyClosedException right after a fake
        // exception aborted the writer; tracked by this handshake flag
        boolean allowAlreadyClosed = false;
        for (int i = 0; i < numDocs; i++) {
            // TODO: add crankyDocValuesFields, etc
            // one doc with every doc-values flavor, points, stored fields,
            // payloads and term vectors, so failures can hit any codec path
            Document doc = new Document();
            doc.add(newStringField("id", Integer.toString(i), Field.Store.NO));
            doc.add(new NumericDocValuesField("dv", i));
            doc.add(new BinaryDocValuesField("dv2", new BytesRef(Integer.toString(i))));
            doc.add(new SortedDocValuesField("dv3", new BytesRef(Integer.toString(i))));
            doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i))));
            doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i - 1))));
            doc.add(new SortedNumericDocValuesField("dv5", i));
            doc.add(new SortedNumericDocValuesField("dv5", i - 1));
            doc.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
            // ensure we store something
            doc.add(new StoredField("stored1", "foo"));
            doc.add(new StoredField("stored1", "bar"));
            // ensure we get some payloads
            doc.add(newTextField("text_payloads", TestUtil.randomAnalysisString(random(), 6, true), Field.Store.NO));
            // ensure we get some vectors
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            ft.setStoreTermVectors(true);
            doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
            doc.add(new IntPoint("point", random().nextInt()));
            doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
            if (random().nextInt(10) > 0) {
                // single doc
                try {
                    iw.addDocument(doc);
                    // we made it, sometimes delete our doc, or update a dv
                    int thingToDo = random().nextInt(4);
                    if (thingToDo == 0) {
                        iw.deleteDocuments(new Term("id", Integer.toString(i)));
                    } else if (thingToDo == 1) {
                        iw.updateNumericDocValue(new Term("id", Integer.toString(i)), "dv", i + 1L);
                    } else if (thingToDo == 2) {
                        iw.updateBinaryDocValue(new Term("id", Integer.toString(i)), "dv2", new BytesRef(Integer.toString(i + 1)));
                    }
                } catch (AlreadyClosedException ace) {
                    // OK: writer was closed by abort; we just reopen now:
                    assertTrue(iw.deleter.isClosed());
                    assertTrue(allowAlreadyClosed);
                    allowAlreadyClosed = false;
                    conf = newIndexWriterConfig(analyzer);
                    // just for now, try to keep this test reproducible
                    conf.setMergeScheduler(new SerialMergeScheduler());
                    conf.setCodec(codec);
                    iw = new IndexWriter(dir, conf);
                } catch (Exception e) {
                    // only the cranky components' fake exceptions are expected here
                    if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
                        exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
                        e.printStackTrace(exceptionStream);
                        allowAlreadyClosed = true;
                    } else {
                        Rethrow.rethrow(e);
                    }
                }
            } else {
                // block docs
                Document doc2 = new Document();
                doc2.add(newStringField("id", Integer.toString(-i), Field.Store.NO));
                doc2.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
                doc2.add(new StoredField("stored1", "foo"));
                doc2.add(new StoredField("stored1", "bar"));
                doc2.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
                try {
                    iw.addDocuments(Arrays.asList(doc, doc2));
                    // we made it, sometimes delete our docs
                    if (random().nextBoolean()) {
                        iw.deleteDocuments(new Term("id", Integer.toString(i)), new Term("id", Integer.toString(-i)));
                    }
                } catch (AlreadyClosedException ace) {
                    // OK: writer was closed by abort; we just reopen now:
                    assertTrue(iw.deleter.isClosed());
                    assertTrue(allowAlreadyClosed);
                    allowAlreadyClosed = false;
                    conf = newIndexWriterConfig(analyzer);
                    // just for now, try to keep this test reproducible
                    conf.setMergeScheduler(new SerialMergeScheduler());
                    conf.setCodec(codec);
                    iw = new IndexWriter(dir, conf);
                } catch (Exception e) {
                    if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
                        exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
                        e.printStackTrace(exceptionStream);
                        allowAlreadyClosed = true;
                    } else {
                        Rethrow.rethrow(e);
                    }
                }
            }
            if (random().nextInt(10) == 0) {
                // trigger flush:
                try {
                    if (random().nextBoolean()) {
                        // open an NRT reader (flushes) and sanity-check it
                        DirectoryReader ir = null;
                        try {
                            ir = DirectoryReader.open(iw, random().nextBoolean(), false);
                            TestUtil.checkReader(ir);
                        } finally {
                            IOUtils.closeWhileHandlingException(ir);
                        }
                    } else {
                        iw.commit();
                    }
                    if (DirectoryReader.indexExists(dir)) {
                        TestUtil.checkIndex(dir);
                    }
                } catch (AlreadyClosedException ace) {
                    // OK: writer was closed by abort; we just reopen now:
                    assertTrue(iw.deleter.isClosed());
                    assertTrue(allowAlreadyClosed);
                    allowAlreadyClosed = false;
                    conf = newIndexWriterConfig(analyzer);
                    // just for now, try to keep this test reproducible
                    conf.setMergeScheduler(new SerialMergeScheduler());
                    conf.setCodec(codec);
                    iw = new IndexWriter(dir, conf);
                } catch (Exception e) {
                    if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
                        exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
                        e.printStackTrace(exceptionStream);
                        allowAlreadyClosed = true;
                    } else {
                        Rethrow.rethrow(e);
                    }
                }
            }
        }
        try {
            iw.close();
        } catch (Exception e) {
            if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
                exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
                e.printStackTrace(exceptionStream);
                // best-effort rollback after a fake exception during close
                try {
                    iw.rollback();
                } catch (Throwable t) {
                }
            } else {
                Rethrow.rethrow(e);
            }
        }
        dir.close();
    } catch (Throwable t) {
        // anything not handled above is a real failure: dump the collected
        // fake-exception log, then rethrow
        System.out.println("Unexpected exception: dumping fake-exception-log:...");
        exceptionStream.flush();
        System.out.println(exceptionLog.toString("UTF-8"));
        System.out.flush();
        Rethrow.rethrow(t);
    }
    if (VERBOSE) {
        System.out.println("TEST PASSED: dumping fake-exception-log:...");
        System.out.println(exceptionLog.toString("UTF-8"));
    }
}
Also used : TokenStream(org.apache.lucene.analysis.TokenStream) CrankyTokenFilter(org.apache.lucene.analysis.CrankyTokenFilter) AlreadyClosedException(org.apache.lucene.store.AlreadyClosedException) Analyzer(org.apache.lucene.analysis.Analyzer) Document(org.apache.lucene.document.Document) CrankyCodec(org.apache.lucene.codecs.cranky.CrankyCodec) AssertingCodec(org.apache.lucene.codecs.asserting.AssertingCodec) Codec(org.apache.lucene.codecs.Codec) StoredField(org.apache.lucene.document.StoredField) Random(java.util.Random) SortedNumericDocValuesField(org.apache.lucene.document.SortedNumericDocValuesField) NumericDocValuesField(org.apache.lucene.document.NumericDocValuesField) SortedDocValuesField(org.apache.lucene.document.SortedDocValuesField) CrankyCodec(org.apache.lucene.codecs.cranky.CrankyCodec) BytesRef(org.apache.lucene.util.BytesRef) Directory(org.apache.lucene.store.Directory) MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) AssertingCodec(org.apache.lucene.codecs.asserting.AssertingCodec) PrintStream(java.io.PrintStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BinaryDocValuesField(org.apache.lucene.document.BinaryDocValuesField) IntPoint(org.apache.lucene.document.IntPoint) AlreadyClosedException(org.apache.lucene.store.AlreadyClosedException) FieldType(org.apache.lucene.document.FieldType) MockTokenizer(org.apache.lucene.analysis.MockTokenizer) IntPoint(org.apache.lucene.document.IntPoint) SortedNumericDocValuesField(org.apache.lucene.document.SortedNumericDocValuesField) SortedSetDocValuesField(org.apache.lucene.document.SortedSetDocValuesField) MockVariableLengthPayloadFilter(org.apache.lucene.analysis.MockVariableLengthPayloadFilter)

Aggregations

Random (java.util.Random)3 Analyzer (org.apache.lucene.analysis.Analyzer)3 MockTokenizer (org.apache.lucene.analysis.MockTokenizer)3 MockVariableLengthPayloadFilter (org.apache.lucene.analysis.MockVariableLengthPayloadFilter)3 Document (org.apache.lucene.document.Document)3 FieldType (org.apache.lucene.document.FieldType)3 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 PrintStream (java.io.PrintStream)2 TokenStream (org.apache.lucene.analysis.TokenStream)2 BinaryDocValuesField (org.apache.lucene.document.BinaryDocValuesField)2 IntPoint (org.apache.lucene.document.IntPoint)2 NumericDocValuesField (org.apache.lucene.document.NumericDocValuesField)2 SortedDocValuesField (org.apache.lucene.document.SortedDocValuesField)2 SortedNumericDocValuesField (org.apache.lucene.document.SortedNumericDocValuesField)2 SortedSetDocValuesField (org.apache.lucene.document.SortedSetDocValuesField)2 StoredField (org.apache.lucene.document.StoredField)2 Directory (org.apache.lucene.store.Directory)2 MockDirectoryWrapper (org.apache.lucene.store.MockDirectoryWrapper)2 BytesRef (org.apache.lucene.util.BytesRef)2 CrankyTokenFilter (org.apache.lucene.analysis.CrankyTokenFilter)1