Use of org.apache.lucene.store.RAMDirectory in project lucene-solr by apache.
The class StemmerTestBase, method init:
static void init(boolean ignoreCase, String affix, String... dictionaries) throws IOException, ParseException {
  if (dictionaries.length == 0) {
    throw new IllegalArgumentException("there must be at least one dictionary");
  }
  InputStream affixStream = StemmerTestBase.class.getResourceAsStream(affix);
  if (affixStream == null) {
    throw new FileNotFoundException("file not found: " + affix);
  }
  InputStream[] dictStreams = new InputStream[dictionaries.length];
  for (int i = 0; i < dictionaries.length; i++) {
    dictStreams[i] = StemmerTestBase.class.getResourceAsStream(dictionaries[i]);
    if (dictStreams[i] == null) {
      // report the missing resource name, not the null stream
      throw new FileNotFoundException("file not found: " + dictionaries[i]);
    }
  }
  try {
    Dictionary dictionary = new Dictionary(new RAMDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
    stemmer = new Stemmer(dictionary);
  } finally {
    IOUtils.closeWhileHandlingException(affixStream);
    IOUtils.closeWhileHandlingException(dictStreams);
  }
}
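For context, here is a hedged sketch of how a test could drive this init method. The subclass name, the simple.aff/simple.dic resources, and the expected stem are hypothetical, and assertStemsTo is assumed to be the assertion helper exposed by StemmerTestBase.

import org.junit.BeforeClass;
import org.junit.Test;

public class TestSimpleStemming extends StemmerTestBase {

  @BeforeClass
  public static void beforeClass() throws Exception {
    // load one hypothetical affix file and one hypothetical dictionary, case-sensitive
    init(false, "simple.aff", "simple.dic");
  }

  @Test
  public void testStem() {
    // hypothetical word/stem pair; assertStemsTo compares against all stems produced
    // by the shared stemmer set up in init
    assertStemsTo("feet", "foot");
  }
}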
Use of org.apache.lucene.store.RAMDirectory in project jena by apache.
The class TestLuceneWithMultipleThreads, method testReadInMiddleOfWrite:
@Test
public void testReadInMiddleOfWrite() throws InterruptedException, ExecutionException {
    final DatasetGraphText dsg = (DatasetGraphText) TextDatasetFactory.createLucene(DatasetGraphFactory.create(), new RAMDirectory(), new TextIndexConfig(entDef));
    final Dataset ds = DatasetFactory.wrap(dsg);
    final ExecutorService execService = Executors.newSingleThreadExecutor();
    final Future<?> f = execService.submit(new Runnable() {
        @Override
        public void run() {
            // Hammer the dataset with a series of read queries
            while (!Thread.interrupted()) {
                dsg.begin(ReadWrite.READ);
                try {
                    QueryExecution qExec = QueryExecutionFactory.create("select * where { ?s ?p ?o }", ds);
                    ResultSet rs = qExec.execSelect();
                    while (rs.hasNext()) {
                        rs.next();
                    }
                    dsg.commit();
                } finally {
                    dsg.end();
                }
            }
        }
    });
    dsg.begin(ReadWrite.WRITE);
    try {
        Model m = ds.getDefaultModel();
        m.add(ResourceFactory.createResource("http://example.org/"), RDFS.label, "entity");
        // Sleep for a bit so that the reader thread can get in between these two writes
        Thread.sleep(100);
        m.add(ResourceFactory.createResource("http://example.org/"), RDFS.comment, "comment");
        dsg.commit();
    } finally {
        dsg.end();
    }
    execService.shutdownNow();
    execService.awaitTermination(1000, TimeUnit.MILLISECONDS);
    // If there was an exception in the read thread then Future.get() will throw an ExecutionException
    assertTrue(f.get() == null);
}
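As a side note, the reader above manages its transaction by hand. A hedged sketch of the same loop using Jena's Txn helper (assuming a Jena version that ships org.apache.jena.system.Txn), which wraps the begin/commit/end boilerplate:

// Equivalent reader loop using Txn.executeRead on the same dsg/ds from the test above.
while (!Thread.interrupted()) {
    Txn.executeRead(dsg, () -> {
        try (QueryExecution qExec = QueryExecutionFactory.create("select * where { ?s ?p ?o }", ds)) {
            ResultSet rs = qExec.execSelect();
            while (rs.hasNext()) {
                rs.next();
            }
        }
    });
}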
Use of org.apache.lucene.store.RAMDirectory in project jena by apache.
The class AbstractTestDatasetWithLuceneGraphTextIndex, method init:
@Before
public void init() {
    Dataset ds1 = TDBFactory.createDataset();
    Directory dir = new RAMDirectory();
    EntityDefinition eDef = new EntityDefinition("iri", "text");
    eDef.setGraphField("graph");
    eDef.setPrimaryPredicate(RDFS.label);
    // some tests require indexing rdfs:comment
    eDef.set("comment", RDFS.comment.asNode());
    TextIndex tidx = new TextIndexLucene(dir, new TextIndexConfig(eDef));
    dataset = TextDatasetFactory.create(ds1, tidx);
}
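Because this setup registers a graph field, text matches can be constrained to a named graph. A minimal, hedged sketch of exercising the dataset built above; the inserted triple, graph name, and query are illustrative only.

// Insert a labelled resource into a named graph, then find it with a graph-scoped text query.
dataset.begin(ReadWrite.WRITE);
try {
    dataset.getNamedModel("http://example.org/g")
           .add(ResourceFactory.createResource("http://example.org/e"), RDFS.label, "entity");
    dataset.commit();
} finally {
    dataset.end();
}

String q = "PREFIX text: <http://jena.apache.org/text#> "
         + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
         + "SELECT ?s ?g WHERE { GRAPH ?g { ?s text:query (rdfs:label 'entity') } }";
dataset.begin(ReadWrite.READ);
try (QueryExecution qExec = QueryExecutionFactory.create(q, dataset)) {
    ResultSet rs = qExec.execSelect();
    while (rs.hasNext()) {
        System.out.println(rs.next());
    }
} finally {
    dataset.end();
}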
Use of org.apache.lucene.store.RAMDirectory in project jena by apache.
The class AbstractTestDatasetWithLuceneTextIndexDeletionSupport, method init:
@Before
public void init() {
    Dataset ds1 = TDBFactory.createDataset();
    Directory dir = new RAMDirectory();
    EntityDefinition eDef = new EntityDefinition("iri", "text");
    eDef.setPrimaryPredicate(RDFS.label);
    // some tests require indexing rdfs:comment
    eDef.set("comment", RDFS.comment.asNode());
    // uid field to allow deletion
    eDef.setUidField("uid");
    TextIndex tidx = new TextIndexLucene(dir, new TextIndexConfig(eDef));
    dataset = TextDatasetFactory.create(ds1, tidx);
}
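The uid field is what lets TextIndexLucene remove index entries when triples are deleted. A hedged sketch of the behaviour this configuration supports; the resource and literal below are illustrative.

// Add a labelled resource, then remove it; with a uid field configured the
// corresponding Lucene document is deleted along with the triple.
Resource r = ResourceFactory.createResource("http://example.org/e");
dataset.begin(ReadWrite.WRITE);
try {
    dataset.getDefaultModel().add(r, RDFS.label, "entity");
    dataset.commit();
} finally {
    dataset.end();
}
dataset.begin(ReadWrite.WRITE);
try {
    dataset.getDefaultModel().remove(r, RDFS.label, ResourceFactory.createPlainLiteral("entity"));
    dataset.commit();
} finally {
    dataset.end();
}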
Use of org.apache.lucene.store.RAMDirectory in project jena by apache.
The class TextIndexLuceneAssembler, method open:
/*
<#index> a :TextIndexLucene ;
    #text:directory "mem" ;
    #text:directory "DIR" ;
    text:directory <file:DIR> ;
    text:entityMap <#endMap> ;
    .
*/
@SuppressWarnings("resource")
@Override
public TextIndex open(Assembler a, Resource root, Mode mode) {
    try {
        if (!GraphUtils.exactlyOneProperty(root, pDirectory))
            throw new TextIndexException("No 'text:directory' property on " + root);

        Directory directory;
        RDFNode n = root.getProperty(pDirectory).getObject();
        if (n.isLiteral()) {
            String literalValue = n.asLiteral().getLexicalForm();
            if (literalValue.equals("mem")) {
                directory = new RAMDirectory();
            } else {
                File dir = new File(literalValue);
                directory = FSDirectory.open(dir.toPath());
            }
        } else {
            Resource x = n.asResource();
            String path = IRILib.IRIToFilename(x.getURI());
            File dir = new File(path);
            directory = FSDirectory.open(dir.toPath());
        }

        Analyzer analyzer = null;
        Statement analyzerStatement = root.getProperty(pAnalyzer);
        if (null != analyzerStatement) {
            RDFNode aNode = analyzerStatement.getObject();
            if (!aNode.isResource()) {
                throw new TextIndexException("Text analyzer property is not a resource : " + aNode);
            }
            Resource analyzerResource = (Resource) aNode;
            analyzer = (Analyzer) a.open(analyzerResource);
        }

        Analyzer queryAnalyzer = null;
        Statement queryAnalyzerStatement = root.getProperty(pQueryAnalyzer);
        if (null != queryAnalyzerStatement) {
            RDFNode qaNode = queryAnalyzerStatement.getObject();
            if (!qaNode.isResource()) {
                throw new TextIndexException("Text query analyzer property is not a resource : " + qaNode);
            }
            Resource analyzerResource = (Resource) qaNode;
            queryAnalyzer = (Analyzer) a.open(analyzerResource);
        }

        String queryParser = null;
        Statement queryParserStatement = root.getProperty(pQueryParser);
        if (null != queryParserStatement) {
            RDFNode qpNode = queryParserStatement.getObject();
            if (!qpNode.isResource()) {
                throw new TextIndexException("Text query parser property is not a resource : " + qpNode);
            }
            Resource parserResource = (Resource) qpNode;
            queryParser = parserResource.getLocalName();
        }

        boolean isMultilingualSupport = false;
        Statement mlSupportStatement = root.getProperty(pMultilingualSupport);
        if (null != mlSupportStatement) {
            RDFNode mlsNode = mlSupportStatement.getObject();
            if (!mlsNode.isLiteral()) {
                throw new TextIndexException("text:multilingualSupport property must be a string : " + mlsNode);
            }
            isMultilingualSupport = mlsNode.asLiteral().getBoolean();
        }

        boolean storeValues = false;
        Statement storeValuesStatement = root.getProperty(pStoreValues);
        if (null != storeValuesStatement) {
            RDFNode svNode = storeValuesStatement.getObject();
            if (!svNode.isLiteral()) {
                throw new TextIndexException("text:storeValues property must be a string : " + svNode);
            }
            storeValues = svNode.asLiteral().getBoolean();
        }

        Resource r = GraphUtils.getResourceValue(root, pEntityMap);
        EntityDefinition docDef = (EntityDefinition) a.open(r);

        TextIndexConfig config = new TextIndexConfig(docDef);
        config.setAnalyzer(analyzer);
        config.setQueryAnalyzer(queryAnalyzer);
        config.setQueryParser(queryParser);
        config.setMultilingualSupport(isMultilingualSupport);
        config.setValueStored(storeValues);

        return TextDatasetFactory.createLuceneIndex(directory, config);
    } catch (IOException e) {
        IO.exception(e);
        return null;
    }
}
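For reference, a description like the one in the comment above is normally loaded through the assembler machinery rather than by calling open directly. A hedged usage sketch, where the configuration file name and the resource URI of the text dataset are hypothetical:

// Assemble a text-indexed dataset from a configuration file containing a
// :TextIndexLucene description such as the one shown in the comment above.
Dataset ds = DatasetFactory.assemble(
        "text-config.ttl",                              // hypothetical config file
        "http://localhost/jena_example/#text_dataset"); // hypothetical resource URI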