Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestTermQueryPrefixGridStrategy, the method testNGramPrefixGridLosAngeles:
@Test
public void testNGramPrefixGridLosAngeles() throws IOException {
  SpatialContext ctx = SpatialContext.GEO;
  TermQueryPrefixTreeStrategy prefixGridStrategy = new TermQueryPrefixTreeStrategy(new QuadPrefixTree(ctx), "geo");
  Shape point = ctx.makePoint(-118.243680, 34.052230);
  Document losAngeles = new Document();
  losAngeles.add(new StringField("name", "Los Angeles", Field.Store.YES));
  for (Field field : prefixGridStrategy.createIndexableFields(point)) {
    losAngeles.add(field);
  }
  // just for diagnostics
  losAngeles.add(new StoredField(prefixGridStrategy.getFieldName(), point.toString()));
  addDocumentsAndCommit(Arrays.asList(losAngeles));
  // This won't work with simple spatial context...
  SpatialArgsParser spatialArgsParser = new SpatialArgsParser();
  // TODO... use a non-polygon query
  //    SpatialArgs spatialArgs = spatialArgsParser.parse(
  //        "Intersects(POLYGON((-127.00390625 39.8125,-112.765625 39.98828125,-111.53515625 31.375,-125.94921875 30.14453125,-127.00390625 39.8125)))",
  //        new SimpleSpatialContext());
  //    Query query = prefixGridStrategy.makeQuery(spatialArgs, fieldInfo);
  //    SearchResults searchResults = executeQuery(query, 1);
  //    assertEquals(1, searchResults.numFound);
}
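The StoredField here plays no role in matching: the indexable fields from createIndexableFields drive the spatial query, while the stored copy of point.toString() only makes the shape readable when the document is loaded back. A minimal standalone sketch of that store-and-retrieve round trip (hedged: RAMDirectory, StandardAnalyzer, and the literal value are illustrative choices, not part of the test above):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class StoredFieldRoundTrip {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      // a StoredField keeps the raw value in the index; it is not indexed or searchable
      doc.add(new StoredField("geo", "Pt(x=-118.24368,y=34.05223)"));
      writer.addDocument(doc);
    }
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      // stored values come back verbatim when the document is loaded
      System.out.println(reader.document(0).get("geo"));
    }
  }
}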
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestSuggestField, the method testReturnedDocID:
@Test
public void testReturnedDocID() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int num = Math.min(1000, atLeast(10));
  for (int i = 0; i < num; i++) {
    Document document = new Document();
    // every suggestion gets the same weight (num); the stored int identifies the document
    document.add(new SuggestField("suggest_field", "abc_" + i, num));
    document.add(new StoredField("int_field", i));
    iw.addDocument(document);
    if (random().nextBoolean()) {
      iw.commit();
    }
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
  assertEquals(num, suggest.totalHits);
  for (SuggestScoreDoc suggestScoreDoc : suggest.scoreLookupDocs()) {
    String key = suggestScoreDoc.key.toString();
    assertTrue(key.startsWith("abc_"));
    String substring = key.substring(4);
    int fieldValue = Integer.parseInt(substring);
    // the doc ID returned with the suggestion must resolve to the matching stored value
    Document doc = reader.document(suggestScoreDoc.doc);
    assertEquals(doc.getField("int_field").numericValue().intValue(), fieldValue);
  }
  reader.close();
  iw.close();
}
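Because a completion hit only carries the suggestion key and a doc ID, the StoredField is what lets the test verify the mapping: the numeric suffix parsed out of "abc_N" must equal the int stored in that same document, regardless of how the random commits split the index into segments.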
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestSuggestField, the method testMultipleSegments:
@Test
public void testMultipleSegments() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int num = Math.min(1000, atLeast(10));
  List<Entry> entries = new ArrayList<>();
  // ensure at least some segments have no suggest field
  for (int i = num; i > 0; i--) {
    Document document = new Document();
    if (random().nextInt(4) == 1) {
      document.add(new SuggestField("suggest_field", "abc_" + i, i));
      entries.add(new Entry("abc_" + i, i));
    }
    document.add(new StoredField("weight_fld", i));
    iw.addDocument(document);
    if (usually()) {
      iw.commit();
    }
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  // ask for at least one result even when no document received a suggest field
  TopSuggestDocs suggest = indexSearcher.suggest(query, (entries.size() == 0) ? 1 : entries.size(), false);
  assertSuggestions(suggest, entries.toArray(new Entry[entries.size()]));
  reader.close();
  iw.close();
}
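Here the StoredField plays a filler role: only about a quarter of the documents receive a SuggestField, so "weight_fld" gives the remaining documents content, producing segments that contain no suggest data at all, which is exactly the index shape this test wants to exercise.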
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestCompressingStoredFieldsFormat, the method testDeletePartiallyWrittenFilesIfAbort:
public void testDeletePartiallyWrittenFilesIfAbort() throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
  iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
  iwConf.setCodec(CompressingCodec.randomInstance(random()));
  // disable CFS because this test checks file names
  iwConf.setMergePolicy(newLogMergePolicy(false));
  iwConf.setUseCompoundFile(false);
  // Cannot use RIW because this test wants CFS to stay off:
  IndexWriter iw = new IndexWriter(dir, iwConf);
  final Document validDoc = new Document();
  validDoc.add(new IntPoint("id", 0));
  validDoc.add(new StoredField("id", 0));
  iw.addDocument(validDoc);
  iw.commit();
  // make sure that #writeField will fail, to trigger an abort
  final Document invalidDoc = new Document();
  FieldType fieldType = new FieldType();
  fieldType.setStored(true);
  invalidDoc.add(new Field("invalid", fieldType) {
    @Override
    public String stringValue() {
      // abort the segment!! We should fix this.
      return null;
    }
  });
  try {
    iw.addDocument(invalidDoc);
    iw.commit();
  } catch (IllegalArgumentException iae) {
    // expected
    assertEquals(iae, iw.getTragicException());
  }
  // Writer should be closed by tragedy
  assertFalse(iw.isOpen());
  dir.close();
}
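The anonymous Field subclass declares itself stored but returns null from stringValue(), so the stored-fields writer fails mid-segment with an IllegalArgumentException; the writer records it as a tragic exception, closes itself, and must delete the partially written files. For contrast, a correctly configured stored-only field always supplies a non-null value; a minimal sketch (the "notes" field name and value are illustrative, not from the test):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;

public class StoredOnlyFieldExample {
  static Document build() {
    FieldType storedOnly = new FieldType();
    storedOnly.setStored(true); // stored for retrieval, never indexed
    storedOnly.freeze();        // lock the type before handing it to a Field
    Document doc = new Document();
    doc.add(new Field("notes", "a non-null stored value", storedOnly));
    return doc;
  }
}

In practice, new StoredField(name, value) is the ready-made shorthand for exactly this stored-only configuration.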
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class SynonymTokenizer, the method setUp:
@Override
public void setUp() throws Exception {
  super.setUp();
  // Not many use this setup:
  a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
  dir = newDirectory();
  // Most tests use this setup:
  analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
  ramDir = newDirectory();
  fieldType = random().nextBoolean() ? FIELD_TYPE_TV : TextField.TYPE_STORED;
  IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));
  for (String text : texts) {
    writer.addDocument(doc(FIELD_NAME, text));
  }
  // a few tests need other docs...:
  Document doc = new Document();
  doc.add(new IntPoint(NUMERIC_FIELD_NAME, 1));
  doc.add(new StoredField(NUMERIC_FIELD_NAME, 1));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint(NUMERIC_FIELD_NAME, 3));
  doc.add(new StoredField(NUMERIC_FIELD_NAME, 3));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint(NUMERIC_FIELD_NAME, 5));
  doc.add(new StoredField(NUMERIC_FIELD_NAME, 5));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint(NUMERIC_FIELD_NAME, 7));
  doc.add(new StoredField(NUMERIC_FIELD_NAME, 7));
  writer.addDocument(doc);
  Document childDoc = doc(FIELD_NAME, "child document");
  Document parentDoc = doc(FIELD_NAME, "parent document");
  writer.addDocuments(Arrays.asList(childDoc, parentDoc));
  writer.forceMerge(1);
  writer.close();
  reader = DirectoryReader.open(ramDir);
  // Misc:
  numHighlights = 0;
}
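The four numeric documents pair each IntPoint with a StoredField of the same name: points are index-only structures for exact and range queries, so the stored twin is what the tests read back after a match. A standalone sketch of this common pattern (hedged: RAMDirectory, StandardAnalyzer, and the "count" field name are illustrative, not from the test above):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class IntPointStoredFieldExample {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      doc.add(new IntPoint("count", 5));    // indexed for point queries, not stored
      doc.add(new StoredField("count", 5)); // stored for retrieval, not indexed
      writer.addDocument(doc);
    }
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      TopDocs hits = searcher.search(IntPoint.newExactQuery("count", 5), 1);
      for (ScoreDoc hit : hits.scoreDocs) {
        // the stored twin supplies the value the point query matched on
        int value = searcher.doc(hit.doc).getField("count").numericValue().intValue();
        System.out.println(value);
      }
    }
  }
}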