Use of org.apache.lucene.document.IntPoint in project lucene-solr by apache.
The class LineFileDocs, method nextDoc.
/** Note: Document instance is re-used per-thread */
public Document nextDoc() throws IOException {
  String line;
  synchronized (this) {
    line = reader.readLine();
    if (line == null) {
      // Always rewind at end:
      if (LuceneTestCase.VERBOSE) {
        System.out.println("TEST: LineFileDocs: now rewind file...");
      }
      reader.close();
      reader = null;
      open(null);
      line = reader.readLine();
    }
  }
  DocState docState = threadDocs.get();
  if (docState == null) {
    docState = new DocState();
    threadDocs.set(docState);
  }
  int spot = line.indexOf(SEP);
  if (spot == -1) {
    throw new RuntimeException("line: [" + line + "] is in an invalid format !");
  }
  int spot2 = line.indexOf(SEP, 1 + spot);
  if (spot2 == -1) {
    throw new RuntimeException("line: [" + line + "] is in an invalid format !");
  }
  docState.body.setStringValue(line.substring(1 + spot2, line.length()));
  final String title = line.substring(0, spot);
  docState.title.setStringValue(title);
  if (docState.titleDV != null) {
    docState.titleDV.setBytesValue(new BytesRef(title));
  }
  docState.titleTokenized.setStringValue(title);
  docState.date.setStringValue(line.substring(1 + spot, spot2));
  final int i = id.getAndIncrement();
  docState.id.setStringValue(Integer.toString(i));
  docState.idNum.setIntValue(i);
  if (docState.idNumDV != null) {
    docState.idNumDV.setLongValue(i);
  }
  if (random.nextInt(5) == 4) {
    // Make some sparse fields
    Document doc = new Document();
    for (IndexableField field : docState.doc) {
      doc.add(field);
    }
    if (random.nextInt(3) == 1) {
      int x = random.nextInt(4);
      doc.add(new IntPoint("docLength" + x, line.length()));
    }
    if (random.nextInt(3) == 1) {
      int x = random.nextInt(4);
      doc.add(new IntPoint("docTitleLength" + x, title.length()));
    }
    if (random.nextInt(3) == 1) {
      int x = random.nextInt(4);
      doc.add(new NumericDocValuesField("docLength" + x, line.length()));
    }
    // TODO: more random sparse fields here too
    // Return the copy that carries the sparse fields; without this return the
    // copy built above would be dead code and the sparse fields never indexed.
    return doc;
  }
  return docState.doc;
}
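For context on the sparse fields above: docLength0..docLength3 and docTitleLength0..docTitleLength3 are one-dimensional IntPoint fields, so they can later be matched with IntPoint's static query factories. A minimal usage sketch, not from the project, assuming an IndexSearcher named searcher over an index built from these documents:

import java.io.IOException;

import org.apache.lucene.document.IntPoint;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;

// Count documents whose whole line length fell in [100, 200] for the
// sparse field "docLength0" (field name taken from nextDoc() above).
static int countMediumLines(IndexSearcher searcher) throws IOException {
  Query q = IntPoint.newRangeQuery("docLength0", 100, 200);
  return searcher.count(q);
}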
Use of org.apache.lucene.document.IntPoint in project lucene-solr by apache.
The class TestIndexWriterExceptions2, method testBasics.
// just one thread, serial merge policy, hopefully debuggable
public void testBasics() throws Exception {
  // disable slow things: we don't rely upon sleeps here.
  Directory dir = newDirectory();
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    ((MockDirectoryWrapper) dir).setUseSlowOpenClosers(false);
  }
  // log all exceptions we hit, in case we fail (for debugging)
  ByteArrayOutputStream exceptionLog = new ByteArrayOutputStream();
  PrintStream exceptionStream = new PrintStream(exceptionLog, true, "UTF-8");
  //PrintStream exceptionStream = System.out;
  // create lots of non-aborting exceptions with a broken analyzer
  final long analyzerSeed = random().nextLong();
  Analyzer analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, false);
      // TODO: can we turn this on? our filter is probably too evil
      tokenizer.setEnableChecks(false);
      TokenStream stream = tokenizer;
      // emit some payloads
      if (fieldName.contains("payloads")) {
        stream = new MockVariableLengthPayloadFilter(new Random(analyzerSeed), stream);
      }
      stream = new CrankyTokenFilter(stream, new Random(analyzerSeed));
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
  // create lots of aborting exceptions with a broken codec
  // we don't need a random codec, as we aren't trying to find bugs in the codec here.
  Codec inner = RANDOM_MULTIPLIER > 1 ? Codec.getDefault() : new AssertingCodec();
  Codec codec = new CrankyCodec(inner, new Random(random().nextLong()));
  IndexWriterConfig conf = newIndexWriterConfig(analyzer);
  // just for now, try to keep this test reproducible
  conf.setMergeScheduler(new SerialMergeScheduler());
  conf.setCodec(codec);
  int numDocs = atLeast(500);
  IndexWriter iw = new IndexWriter(dir, conf);
  try {
    boolean allowAlreadyClosed = false;
    for (int i = 0; i < numDocs; i++) {
      // TODO: add crankyDocValuesFields, etc
      Document doc = new Document();
      doc.add(newStringField("id", Integer.toString(i), Field.Store.NO));
      doc.add(new NumericDocValuesField("dv", i));
      doc.add(new BinaryDocValuesField("dv2", new BytesRef(Integer.toString(i))));
      doc.add(new SortedDocValuesField("dv3", new BytesRef(Integer.toString(i))));
      doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i))));
      doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i - 1))));
      doc.add(new SortedNumericDocValuesField("dv5", i));
      doc.add(new SortedNumericDocValuesField("dv5", i - 1));
      doc.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
      // ensure we store something
      doc.add(new StoredField("stored1", "foo"));
      doc.add(new StoredField("stored1", "bar"));
      // ensure we get some payloads
      doc.add(newTextField("text_payloads", TestUtil.randomAnalysisString(random(), 6, true), Field.Store.NO));
      // ensure we get some vectors
      FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
      ft.setStoreTermVectors(true);
      doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
      doc.add(new IntPoint("point", random().nextInt()));
      doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
      if (random().nextInt(10) > 0) {
        // single doc
        try {
          iw.addDocument(doc);
          // we made it, sometimes delete our doc, or update a dv
          int thingToDo = random().nextInt(4);
          if (thingToDo == 0) {
            iw.deleteDocuments(new Term("id", Integer.toString(i)));
          } else if (thingToDo == 1) {
            iw.updateNumericDocValue(new Term("id", Integer.toString(i)), "dv", i + 1L);
          } else if (thingToDo == 2) {
            iw.updateBinaryDocValue(new Term("id", Integer.toString(i)), "dv2", new BytesRef(Integer.toString(i + 1)));
          }
        } catch (AlreadyClosedException ace) {
          // OK: writer was closed by abort; we just reopen now:
          assertTrue(iw.deleter.isClosed());
          assertTrue(allowAlreadyClosed);
          allowAlreadyClosed = false;
          conf = newIndexWriterConfig(analyzer);
          // just for now, try to keep this test reproducible
          conf.setMergeScheduler(new SerialMergeScheduler());
          conf.setCodec(codec);
          iw = new IndexWriter(dir, conf);
        } catch (Exception e) {
          if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
            exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
            e.printStackTrace(exceptionStream);
            allowAlreadyClosed = true;
          } else {
            Rethrow.rethrow(e);
          }
        }
      } else {
        // block docs
        Document doc2 = new Document();
        doc2.add(newStringField("id", Integer.toString(-i), Field.Store.NO));
        doc2.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
        doc2.add(new StoredField("stored1", "foo"));
        doc2.add(new StoredField("stored1", "bar"));
        doc2.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
        try {
          iw.addDocuments(Arrays.asList(doc, doc2));
          // we made it, sometimes delete our docs
          if (random().nextBoolean()) {
            iw.deleteDocuments(new Term("id", Integer.toString(i)), new Term("id", Integer.toString(-i)));
          }
        } catch (AlreadyClosedException ace) {
          // OK: writer was closed by abort; we just reopen now:
          assertTrue(iw.deleter.isClosed());
          assertTrue(allowAlreadyClosed);
          allowAlreadyClosed = false;
          conf = newIndexWriterConfig(analyzer);
          // just for now, try to keep this test reproducible
          conf.setMergeScheduler(new SerialMergeScheduler());
          conf.setCodec(codec);
          iw = new IndexWriter(dir, conf);
        } catch (Exception e) {
          if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
            exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
            e.printStackTrace(exceptionStream);
            allowAlreadyClosed = true;
          } else {
            Rethrow.rethrow(e);
          }
        }
      }
      if (random().nextInt(10) == 0) {
        // trigger flush:
        try {
          if (random().nextBoolean()) {
            DirectoryReader ir = null;
            try {
              ir = DirectoryReader.open(iw, random().nextBoolean(), false);
              TestUtil.checkReader(ir);
            } finally {
              IOUtils.closeWhileHandlingException(ir);
            }
          } else {
            iw.commit();
          }
          if (DirectoryReader.indexExists(dir)) {
            TestUtil.checkIndex(dir);
          }
        } catch (AlreadyClosedException ace) {
          // OK: writer was closed by abort; we just reopen now:
          assertTrue(iw.deleter.isClosed());
          assertTrue(allowAlreadyClosed);
          allowAlreadyClosed = false;
          conf = newIndexWriterConfig(analyzer);
          // just for now, try to keep this test reproducible
          conf.setMergeScheduler(new SerialMergeScheduler());
          conf.setCodec(codec);
          iw = new IndexWriter(dir, conf);
        } catch (Exception e) {
          if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
            exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
            e.printStackTrace(exceptionStream);
            allowAlreadyClosed = true;
          } else {
            Rethrow.rethrow(e);
          }
        }
      }
    }
    try {
      iw.close();
    } catch (Exception e) {
      if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
        exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());
        e.printStackTrace(exceptionStream);
        try {
          iw.rollback();
        } catch (Throwable t) {
        }
      } else {
        Rethrow.rethrow(e);
      }
    }
    dir.close();
  } catch (Throwable t) {
    System.out.println("Unexpected exception: dumping fake-exception-log:...");
    exceptionStream.flush();
    System.out.println(exceptionLog.toString("UTF-8"));
    System.out.flush();
    Rethrow.rethrow(t);
  }
  if (VERBOSE) {
    System.out.println("TEST PASSED: dumping fake-exception-log:...");
    System.out.println(exceptionLog.toString("UTF-8"));
  }
}
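Note how each catch (AlreadyClosedException) block above repeats the same recovery: assert the abort was expected, then reopen a fresh IndexWriter with the same analyzer, codec, and SerialMergeScheduler. A sketch of that pattern factored into a helper (the helper itself is illustrative and not part of the test, which inlines this logic three times):

// Illustrative refactoring only: recreate the writer after an expected abort,
// keeping the serial merge scheduler and cranky codec so runs stay reproducible.
private IndexWriter reopenAfterAbort(Directory dir, Analyzer analyzer, Codec codec) throws IOException {
  IndexWriterConfig conf = newIndexWriterConfig(analyzer);
  conf.setMergeScheduler(new SerialMergeScheduler());
  conf.setCodec(codec);
  return new IndexWriter(dir, conf);
}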
Use of org.apache.lucene.document.IntPoint in project lucene-solr by apache.
The class TestIndexWriterOnDiskFull, method addDocWithIndex.
private void addDocWithIndex(IndexWriter writer, int index) throws IOException {
  Document doc = new Document();
  doc.add(newTextField("content", "aaa " + index, Field.Store.NO));
  doc.add(newTextField("id", "" + index, Field.Store.NO));
  doc.add(new NumericDocValuesField("numericdv", 1));
  doc.add(new IntPoint("point", 1));
  doc.add(new IntPoint("point2d", 1, 1));
  writer.addDocument(doc);
}
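The two point fields written here differ in dimensionality: "point" is a one-dimensional IntPoint and "point2d" is a two-dimensional one, and each has matching static query factories on IntPoint. A hedged sketch (the bounds are illustrative, not from the test):

// 1D: exact match on the single-dimension field.
Query exact = IntPoint.newExactQuery("point", 1);
// 2D: range (bounding-box) query; lower/upper bounds are per-dimension arrays.
Query box = IntPoint.newRangeQuery("point2d", new int[] { 0, 0 }, new int[] { 2, 2 });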
Use of org.apache.lucene.document.IntPoint in project lucene-solr by apache.
The class TestIndexWriterOnDiskFull, method addDoc.
// TODO: these are also in TestIndexWriter... add a simple doc-writing method
// like this to LuceneTestCase?
private void addDoc(IndexWriter writer) throws IOException {
  Document doc = new Document();
  doc.add(newTextField("content", "aaa", Field.Store.NO));
  doc.add(new NumericDocValuesField("numericdv", 1));
  doc.add(new IntPoint("point", 1));
  doc.add(new IntPoint("point2d", 1, 1));
  writer.addDocument(doc);
}
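One more IntPoint factory that applies to a one-dimensional field like "point" above is set membership. Also worth noting: an IntPoint is indexed but neither stored nor a doc-values field, which is why these helpers pair it with a NumericDocValuesField when a test needs doc values. A small sketch:

// Match documents whose "point" value equals any of the listed integers.
Query anyOf = IntPoint.newSetQuery("point", 1, 2, 3);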