Use of org.apache.lucene.store.SimpleFSDirectory in project elasticsearch by elastic: class KeyStoreWrapper, method load.
/**
 * Loads information about the Elasticsearch keystore from the provided config directory.
 *
 * {@link #decrypt(char[])} must be called before reading or writing any entries.
 * Returns {@code null} if no keystore exists.
 */
public static KeyStoreWrapper load(Path configDir) throws IOException {
    Path keystoreFile = keystorePath(configDir);
    if (Files.exists(keystoreFile) == false) {
        return null;
    }
    SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
    try (IndexInput indexInput = directory.openInput(KEYSTORE_FILENAME, IOContext.READONCE)) {
        ChecksumIndexInput input = new BufferedChecksumIndexInput(indexInput);
        CodecUtil.checkHeader(input, KEYSTORE_FILENAME, FORMAT_VERSION, FORMAT_VERSION);
        byte hasPasswordByte = input.readByte();
        boolean hasPassword = hasPasswordByte == 1;
        if (hasPassword == false && hasPasswordByte != 0) {
            throw new IllegalStateException("hasPassword boolean is corrupt: "
                + String.format(Locale.ROOT, "%02x", hasPasswordByte));
        }
        String type = input.readString();
        String secretKeyAlgo = input.readString();
        byte[] keystoreBytes = new byte[input.readInt()];
        input.readBytes(keystoreBytes, 0, keystoreBytes.length);
        CodecUtil.checkFooter(input);
        return new KeyStoreWrapper(hasPassword, type, secretKeyAlgo, keystoreBytes);
    }
}
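For context, a minimal call-site sketch follows, based only on the javadoc above: load(Path) returns null when no keystore file exists, and decrypt(char[]) must be called before entries are read. The config path, the empty password, and the KeyStoreWrapper import package are illustrative assumptions, not values taken from the Elasticsearch code.

import java.nio.file.Path;
import java.nio.file.Paths;
import org.elasticsearch.common.settings.KeyStoreWrapper;  // assumed package for this Elasticsearch version

// Hedged call-site sketch for the load() method shown above.
public class KeyStoreLoadSketch {
    public static void main(String[] args) throws Exception {
        Path configDir = Paths.get("/etc/elasticsearch");      // hypothetical config directory
        KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir);
        if (keystore == null) {
            System.out.println("no keystore file in " + configDir);
            return;
        }
        keystore.decrypt(new char[0]);  // per the javadoc: decrypt before reading or writing entries
    }
}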
Use of org.apache.lucene.store.SimpleFSDirectory in project entando-core by entando: class SearcherDAO, method getSearcher.
private IndexSearcher getSearcher() throws IOException {
    FSDirectory directory = new SimpleFSDirectory(_indexDir);
    IndexReader reader = DirectoryReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(reader);
    return searcher;
}
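The method above opens a directory and a reader but leaves closing them to the caller. Below is a hedged, standalone sketch of how such a searcher is typically used and released with plain Lucene calls, assuming a Lucene version whose SimpleFSDirectory constructor takes a java.nio.file.Path; the index path, field name, and query term are illustrative assumptions.

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.SimpleFSDirectory;

public class SearcherSketch {
    public static void main(String[] args) throws IOException {
        // Open the directory and reader in try-with-resources so both are closed,
        // which getSearcher() above delegates to its caller.
        try (FSDirectory directory = new SimpleFSDirectory(Paths.get("/tmp/entando-index"));
             IndexReader reader = DirectoryReader.open(directory)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            TopDocs hits = searcher.search(new TermQuery(new Term("title", "lucene")), 10);
            System.out.println("total hits: " + hits.totalHits);
        }
    }
}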
Use of org.apache.lucene.store.SimpleFSDirectory in project elephant-bird by twitter: class HdfsMergeTool, method execute.
@Override
public void execute(Mapper.Context context) throws IOException {
    Configuration conf = HadoopCompat.getConfiguration(context);
    List<String> indexes = HadoopUtils.readStringListFromConfAsBase64(INDEXES_KEY, conf);
    Path output = new Path(conf.get(OUTPUT_KEY));
    File tmpDirFile = Files.createTempDir();
    int maxMergeFactor = conf.getInt(MAX_MERGE_FACTOR_KEY, -1);
    Preconditions.checkArgument(maxMergeFactor > 0);
    Directory directory = new SimpleFSDirectory(tmpDirFile, NoLockFactory.getNoLockFactory());
    IndexWriter writer = LuceneIndexOutputFormat.createIndexWriter(directory,
        new LuceneIndexOutputFormat.NeverTokenizeAnalyzer(), maxMergeFactor);
    Directory[] dirs = new Directory[indexes.size()];
    int dir = 0;
    for (String index : indexes) {
        dirs[dir++] = new LuceneHdfsDirectory(index, FileSystem.get(conf));
    }
    LOG.info("Adding indexes: " + indexes);
    writer.addIndexes(dirs);
    LOG.info("Force merging...");
    writer.forceMerge(1);
    LOG.info("Closing writer...");
    writer.close();
    FileSystem fs = FileSystem.get(conf);
    LOG.info("Copying index to HDFS...");
    if (!FileUtil.copy(tmpDirFile, fs, output, true, conf)) {
        throw new IOException("Failed to copy local index to HDFS!");
    }
    LOG.info("Index written to: " + output);
}
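The elephant-bird tool merges several HDFS-backed indexes into one local directory, force-merges to a single segment, and copies the result to HDFS. Stripped of the Hadoop and elephant-bird helpers, the same merge pattern with plain Lucene APIs looks roughly like the hedged sketch below; the paths and the analyzer choice are assumptions, not values from the project.

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class MergeSketch {
    public static void main(String[] args) throws IOException {
        try (Directory target = FSDirectory.open(Paths.get("/tmp/merged-index"));   // hypothetical paths
             Directory shard1 = FSDirectory.open(Paths.get("/tmp/shard-1"));
             Directory shard2 = FSDirectory.open(Paths.get("/tmp/shard-2"));
             IndexWriter writer = new IndexWriter(target, new IndexWriterConfig(new KeywordAnalyzer()))) {
            writer.addIndexes(shard1, shard2);  // pull the shard indexes into the target directory
            writer.forceMerge(1);               // collapse to a single segment, as execute() does above
        }
    }
}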
Use of org.apache.lucene.store.SimpleFSDirectory in project jspwiki by apache: class LuceneSearchProvider, method updateLuceneIndex.
/**
 * Updates the lucene index for a single page.
 *
 * @param page The WikiPage to check
 * @param text The page text to index.
 */
protected synchronized void updateLuceneIndex(WikiPage page, String text) {
    IndexWriter writer = null;
    log.debug("Updating Lucene index for page '" + page.getName() + "'...");
    Directory luceneDir = null;
    try {
        pageRemoved(page);
        // Now add back the new version.
        luceneDir = new SimpleFSDirectory(new File(m_luceneDirectory), null);
        writer = getIndexWriter(luceneDir);
        luceneIndexPage(page, text, writer);
    } catch (IOException e) {
        log.error("Unable to update page '" + page.getName() + "' from Lucene index", e);
        // reindexPage( page );
    } catch (Exception e) {
        log.error("Unexpected Lucene exception - please check configuration!", e);
        // reindexPage( page );
    } finally {
        close(writer);
    }
    log.debug("Done updating Lucene index for page '" + page.getName() + "'.");
}
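For reference, a hedged sketch of what a single-page indexing step like luceneIndexPage(page, text, writer) might do with plain Lucene document APIs; the field names and storage flags are illustrative assumptions, not JSPWiki's actual schema.

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;

public final class PageIndexSketch {
    private PageIndexSketch() {}

    // Adds one wiki page to the index; "name" and "content" are assumed field names.
    static void indexPage(String pageName, String text, IndexWriter writer) throws IOException {
        Document doc = new Document();
        doc.add(new StringField("name", pageName, Field.Store.YES));  // exact-match page name
        doc.add(new TextField("content", text, Field.Store.NO));      // analyzed page body
        writer.addDocument(doc);
    }
}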