Example usage of org.elasticsearch.index.engine.Engine.Searcher from the elastic/elasticsearch project: class InternalEngineTests, method testTranslogReplayWithFailure.
/**
 * Indexes a handful of documents, closes the engine, and then replays the translog into a
 * freshly created engine while randomly injecting I/O failures via {@link MockDirectoryWrapper}.
 * Verifies that engine startup eventually succeeds once failure injection is disabled and
 * that no documents are lost across the replay.
 */
public void testTranslogReplayWithFailure() throws IOException {
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null);
        Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
        Engine.IndexResult indexResult = engine.index(firstIndexRequest);
        assertThat(indexResult.getVersion(), equalTo(1L));
    }
    engine.refresh("test");
    // sanity check: all docs must be visible before we tear the engine down
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    engine.close();
    final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
    if (directory != null) {
        // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
        // this so we have to disable the check explicitly
        boolean started = false;
        final int numIters = randomIntBetween(10, 20);
        for (int i = 0; i < numIters; i++) {
            directory.setRandomIOExceptionRateOnOpen(randomDouble());
            directory.setRandomIOExceptionRate(randomDouble());
            directory.setFailOnOpenInput(randomBoolean());
            directory.setAllowRandomFileNotFoundException(randomBoolean());
            try {
                engine = createEngine(store, primaryTranslogDir);
                started = true;
                break;
            } catch (EngineException | IOException ignored) {
                // expected: the injected random I/O failures may abort engine creation — retry
            }
        }
        // turn off all failure injection before the final, must-succeed attempt
        directory.setRandomIOExceptionRateOnOpen(0.0);
        directory.setRandomIOExceptionRate(0.0);
        directory.setFailOnOpenInput(false);
        directory.setAllowRandomFileNotFoundException(false);
        if (started == false) {
            engine = createEngine(store, primaryTranslogDir);
        }
    } else {
        // no mock directory, no fun.
        engine = createEngine(store, primaryTranslogDir);
    }
    // after translog replay every originally-indexed doc must be searchable again
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
}
Example usage of org.elasticsearch.index.engine.Engine.Searcher from the elastic/elasticsearch project: class ValuesSourceConfigTests, method testLong.
/**
 * Resolving a mapped {@code long} field yields a numeric values source that exposes
 * the indexed value through its doc values.
 */
public void testLong() throws Exception {
    IndexService indexService = createIndex("index", Settings.EMPTY, "type", "long", "type=long");
    // index a single doc and make it immediately visible to searches
    client().prepareIndex("index", "type", "1")
        .setSource("long", 42)
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .get();
    try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
        QueryShardContext shardContext = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L);
        ValuesSourceConfig<ValuesSource.Numeric> resolved =
            ValuesSourceConfig.resolve(shardContext, null, "long", null, null, null, null);
        ValuesSource.Numeric source = resolved.toValuesSource(shardContext);
        LeafReaderContext leaf = searcher.reader().leaves().get(0);
        SortedNumericDocValues docValues = source.longValues(leaf);
        docValues.setDocument(0);
        // exactly one value, and it is the one we indexed
        assertEquals(1, docValues.count());
        assertEquals(42, docValues.valueAt(0));
    }
}
Example usage of org.elasticsearch.index.engine.Engine.Searcher from the elastic/elasticsearch project: class ValuesSourceConfigTests, method testEmptyBoolean.
/**
 * A mapped boolean field with no indexed value yields empty doc values, unless a
 * "missing" default is supplied — in which case the default ({@code true}, i.e. 1)
 * is returned instead.
 */
public void testEmptyBoolean() throws Exception {
    IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bool", "type=boolean");
    // index a doc with an empty source so the "bool" field is absent
    client().prepareIndex("index", "type", "1")
        .setSource()
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .get();
    try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
        QueryShardContext shardContext = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L);
        LeafReaderContext leaf = searcher.reader().leaves().get(0);

        // without a missing value: no doc values for the absent field
        ValuesSourceConfig<ValuesSource.Numeric> resolved =
            ValuesSourceConfig.resolve(shardContext, null, "bool", null, null, null, null);
        ValuesSource.Numeric source = resolved.toValuesSource(shardContext);
        SortedNumericDocValues docValues = source.longValues(leaf);
        docValues.setDocument(0);
        assertEquals(0, docValues.count());

        // with missing=true: the default fills in as a single value of 1
        resolved = ValuesSourceConfig.resolve(shardContext, null, "bool", null, true, null, null);
        source = resolved.toValuesSource(shardContext);
        docValues = source.longValues(leaf);
        docValues.setDocument(0);
        assertEquals(1, docValues.count());
        assertEquals(1, docValues.valueAt(0));
    }
}
Example usage of org.elasticsearch.index.engine.Engine.Searcher from the elastic/elasticsearch project: class ValuesSourceConfigTests, method testUnmappedBoolean.
/**
 * An unmapped field resolved with an explicit {@code ValueType.BOOLEAN} hint produces no
 * values source at all — unless a "missing" default is supplied, in which case the
 * default ({@code true}, i.e. 1) is materialized for every document.
 */
public void testUnmappedBoolean() throws Exception {
    // no mapping for "bool" is created here
    IndexService indexService = createIndex("index", Settings.EMPTY, "type");
    client().prepareIndex("index", "type", "1")
        .setSource()
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .get();
    try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
        QueryShardContext shardContext = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L);

        // unmapped field, no missing value -> null values source
        ValuesSourceConfig<ValuesSource.Numeric> resolved =
            ValuesSourceConfig.resolve(shardContext, ValueType.BOOLEAN, "bool", null, null, null, null);
        ValuesSource.Numeric source = resolved.toValuesSource(shardContext);
        assertNull(source);

        // unmapped field with missing=true -> default is surfaced as 1
        resolved = ValuesSourceConfig.resolve(shardContext, ValueType.BOOLEAN, "bool", null, true, null, null);
        source = resolved.toValuesSource(shardContext);
        LeafReaderContext leaf = searcher.reader().leaves().get(0);
        SortedNumericDocValues docValues = source.longValues(leaf);
        docValues.setDocument(0);
        assertEquals(1, docValues.count());
        assertEquals(1, docValues.valueAt(0));
    }
}
Example usage of org.elasticsearch.index.engine.Engine.Searcher from the elastic/elasticsearch project: class InternalEngineTests, method testUpgradeOldIndex.
/**
 * Unzips a random subset of backwards-compatibility index snapshots, opens each with a new
 * engine (possibly several times) to verify the old translog is upgraded — commit user data
 * must carry the new translog UUID/generation keys and no legacy {@code translog_id} marker —
 * and finally indexes extra documents on top to prove the upgraded index is writable.
 */
public void testUpgradeOldIndex() throws IOException {
    List<Path> indexes = new ArrayList<>();
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) {
        for (Path path : stream) {
            indexes.add(path);
        }
    }
    // test a random subset to keep runtime bounded
    Collections.shuffle(indexes, random());
    for (Path indexFile : indexes.subList(0, scaledRandomIntBetween(1, indexes.size() / 2))) {
        final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT);
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        // decompress the index
        try (InputStream stream = Files.newInputStream(indexFile)) {
            TestUtil.unzip(stream, unzipDir);
        }
        // check it is unique
        assertTrue(Files.exists(unzipDataDir));
        Path[] list = filterExtraFSFiles(FileSystemUtils.files(unzipDataDir));
        if (list.length != 1) {
            throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length + " " + Arrays.toString(list));
        }
        // the bwc scripts packs the indices under this path
        Path src = OldIndexUtils.getIndexDir(logger, indexName, indexFile.toString(), list[0]);
        Path translog = src.resolve("0").resolve("translog");
        assertTrue("[" + indexFile + "] missing translog dir: " + translog.toString(), Files.exists(translog));
        Path[] tlogFiles = filterExtraFSFiles(FileSystemUtils.files(translog));
        // exactly two files expected: the checkpoint (ckp) and the translog (tlog)
        // NOTE: JUnit's assertEquals is (message, expected, actual) — expected count is 2
        assertEquals(Arrays.toString(tlogFiles), 2, tlogFiles.length);
        Path tlogFile = tlogFiles[0].getFileName().toString().endsWith("tlog") ? tlogFiles[0] : tlogFiles[1];
        final long size = Files.size(tlogFile);
        // log the file whose size we actually measured, not tlogFiles[0] which may be the ckp
        logger.debug("upgrading index {} file: {} size: {}", indexName, tlogFile.getFileName(), size);
        Directory directory = newFSDirectory(src.resolve("0").resolve("index"));
        final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), src);
        final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetaData);
        final Store store = createStore(indexSettings, directory);
        final int iters = randomIntBetween(0, 2);
        int numDocs = -1;
        for (int i = 0; i < iters; i++) {
            // make sure we can restart on an upgraded index
            try (InternalEngine engine = createEngine(indexSettings, store, translog, newMergePolicy())) {
                try (Searcher searcher = engine.acquireSearcher("test")) {
                    if (i > 0) {
                        // doc count must be stable across restarts
                        assertEquals(numDocs, searcher.reader().numDocs());
                    }
                    TopDocs search = searcher.searcher().search(new MatchAllDocsQuery(), 1);
                    numDocs = searcher.reader().numDocs();
                    assertTrue(search.totalHits > 1);
                }
                // the upgraded commit must use the new translog metadata keys
                CommitStats commitStats = engine.commitStats();
                Map<String, String> userData = commitStats.getUserData();
                assertTrue("user data doesn't contain uuid", userData.containsKey(Translog.TRANSLOG_UUID_KEY));
                assertTrue("user data doesn't contain generation key", userData.containsKey(Translog.TRANSLOG_GENERATION_KEY));
                assertFalse("user data contains legacy marker", userData.containsKey("translog_id"));
            }
        }
        try (InternalEngine engine = createEngine(indexSettings, store, translog, newMergePolicy())) {
            if (numDocs == -1) {
                // iters may have been 0; capture the baseline doc count now
                try (Searcher searcher = engine.acquireSearcher("test")) {
                    numDocs = searcher.reader().numDocs();
                }
            }
            // the upgraded index must accept new writes
            final int numExtraDocs = randomIntBetween(1, 10);
            for (int i = 0; i < numExtraDocs; i++) {
                ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null);
                Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
                Engine.IndexResult indexResult = engine.index(firstIndexRequest);
                assertThat(indexResult.getVersion(), equalTo(1L));
            }
            engine.refresh("test");
            try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
                TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + numExtraDocs));
                assertThat(topDocs.totalHits, equalTo(numDocs + numExtraDocs));
            }
        }
        IOUtils.close(store, directory);
    }
}
End of aggregated usage examples.