Use of org.opensearch.common.TriFunction in project OpenSearch by opensearch-project.
From the class MetadataIndexUpgradeService, method checkMappingsCompatibility.
/**
 * Checks the mappings for compatibility with the current version.
 */
private void checkMappingsCompatibility(IndexMetadata indexMetadata) {
    try {
        // We cannot instantiate a real analysis service or similarity service at this point because the node
        // might not have been started yet. However, we don't really need real analyzers or similarities at
        // this stage - so we can fake them using constant maps that accept every key.
        // This is ok because all similarities and analyzers used by this index were known before the upgrade.
        // A missing analyzer or similarity plugin will still trigger the appropriate error during the
        // actual upgrade.
        IndexSettings indexSettings = new IndexSettings(indexMetadata, this.settings);

        final Map<String, TriFunction<Settings, Version, ScriptService, Similarity>> similarityMap =
            new AbstractMap<String, TriFunction<Settings, Version, ScriptService, Similarity>>() {
                @Override
                public boolean containsKey(Object key) {
                    return true;
                }

                @Override
                public TriFunction<Settings, Version, ScriptService, Similarity> get(Object key) {
                    assert key instanceof String : "key must be a string but was: " + key.getClass();
                    return (settings, version, scriptService) -> new BM25Similarity();
                }

                // this entrySet impl isn't fully correct but necessary as SimilarityService will iterate
                // over all similarities
                @Override
                public Set<Entry<String, TriFunction<Settings, Version, ScriptService, Similarity>>> entrySet() {
                    return Collections.emptySet();
                }
            };
        SimilarityService similarityService = new SimilarityService(indexSettings, null, similarityMap);

        final NamedAnalyzer fakeDefault = new NamedAnalyzer("default", AnalyzerScope.INDEX, new Analyzer() {
            @Override
            protected TokenStreamComponents createComponents(String fieldName) {
                throw new UnsupportedOperationException("shouldn't be here");
            }
        });
        final Map<String, NamedAnalyzer> analyzerMap = new AbstractMap<String, NamedAnalyzer>() {
            @Override
            public NamedAnalyzer get(Object key) {
                assert key instanceof String : "key must be a string but was: " + key.getClass();
                return new NamedAnalyzer((String) key, AnalyzerScope.INDEX, fakeDefault.analyzer());
            }

            // this entrySet impl isn't fully correct but necessary as IndexAnalyzers will iterate
            // over all analyzers to close them
            @Override
            public Set<Entry<String, NamedAnalyzer>> entrySet() {
                return Collections.emptySet();
            }
        };
        try (IndexAnalyzers fakeIndexAnalzyers = new IndexAnalyzers(analyzerMap, analyzerMap, analyzerMap)) {
            MapperService mapperService = new MapperService(
                indexSettings,
                fakeIndexAnalzyers,
                xContentRegistry,
                similarityService,
                mapperRegistry,
                () -> null,
                () -> false,
                scriptService
            );
            mapperService.merge(indexMetadata, MapperService.MergeReason.MAPPING_RECOVERY);
        }
    } catch (Exception ex) {
        // Wrap the inner exception so we have the index name in the exception message
        throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetadata.getIndex() + "]", ex);
    }
}
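To isolate the pattern: TriFunction is OpenSearch's three-argument counterpart to java.util.function.BiFunction, and the snippet builds its fake similarity registry by overriding AbstractMap so that every key appears present and every lookup returns a constant factory. Below is a minimal, self-contained sketch of that trick; PermissiveMapSketch and its local TriFunction are illustrative stand-ins, not OpenSearch API.

import java.util.AbstractMap;
import java.util.Collections;
import java.util.Map;
import java.util.Set;

public class PermissiveMapSketch {

    // Mirrors org.opensearch.common.TriFunction: a three-argument analogue of BiFunction.
    @FunctionalInterface
    interface TriFunction<S, T, U, R> {
        R apply(S s, T t, U u);
    }

    // A map that claims to contain every key and answers get() with a constant factory,
    // the same trick checkMappingsCompatibility uses for its fake similarity registry.
    static Map<String, TriFunction<String, Integer, Long, String>> permissiveMap() {
        return new AbstractMap<String, TriFunction<String, Integer, Long, String>>() {
            @Override
            public boolean containsKey(Object key) {
                return true; // pretend every similarity/analyzer name is registered
            }

            @Override
            public TriFunction<String, Integer, Long, String> get(Object key) {
                return (a, b, c) -> "constant"; // every lookup yields the same factory
            }

            @Override
            public Set<Entry<String, TriFunction<String, Integer, Long, String>>> entrySet() {
                return Collections.emptySet(); // callers may iterate; nothing to enumerate
            }
        };
    }

    public static void main(String[] args) {
        Map<String, TriFunction<String, Integer, Long, String>> map = permissiveMap();
        System.out.println(map.containsKey("anything"));       // true
        System.out.println(map.get("BM25").apply("s", 1, 2L)); // constant
    }
}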
Use of org.opensearch.common.TriFunction in project OpenSearch by opensearch-project.
From the class AbstractSortTestCase, method createMockShardContext.
protected final QueryShardContext createMockShardContext(IndexSearcher searcher) {
    Index index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_");
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(
        index,
        Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build()
    );
    BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, Mockito.mock(BitsetFilterCache.Listener.class));
    TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataLookup =
        (fieldType, fieldIndexName, searchLookup) -> {
            IndexFieldData.Builder builder = fieldType.fielddataBuilder(fieldIndexName, searchLookup);
            return builder.build(new IndexFieldDataCache.None(), null);
        };
    return new QueryShardContext(
        0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup,
        null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher,
        () -> randomNonNegativeLong(), null, null, () -> true, null
    ) {
        @Override
        public MappedFieldType fieldMapper(String name) {
            return provideMappedFieldType(name);
        }

        @Override
        public ObjectMapper getObjectMapper(String name) {
            BuilderContext context = new BuilderContext(this.getIndexSettings().getSettings(), new ContentPath());
            return new ObjectMapper.Builder<>(name).nested(Nested.newNested()).build(context);
        }
    };
}
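For orientation, the indexFieldDataLookup lambda above shows the typical shape of a TriFunction: three inputs (a field type, an index field name, and a lazily supplied SearchLookup) flow into one result. Here is a tiny self-contained sketch of that shape, using simple stand-in types rather than OpenSearch classes:

import java.util.function.Supplier;

public class LookupSketch {

    // Local stand-in mirroring org.opensearch.common.TriFunction.
    @FunctionalInterface
    interface TriFunction<S, T, U, R> {
        R apply(S s, T t, U u);
    }

    public static void main(String[] args) {
        // stand-in for (fieldType, fieldIndexName, searchLookup) -> fieldData
        TriFunction<String, String, Supplier<String>, String> lookup =
            (fieldType, fieldIndexName, searchLookup) ->
                fieldType + "/" + fieldIndexName + " via " + searchLookup.get();

        // the third argument is a Supplier so the expensive lookup machinery
        // is only built if the function actually needs it
        System.out.println(lookup.apply("keyword", "my_field", () -> "search-lookup"));
    }
}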
Use of org.opensearch.common.TriFunction in project OpenSearch by opensearch-project.
From the class InternalEngineTests, method assertOpsOnPrimary.
private int assertOpsOnPrimary(List<Engine.Operation> ops, long currentOpVersion, boolean docDeleted, InternalEngine engine) throws IOException {
    String lastFieldValue = null;
    int opsPerformed = 0;
    long lastOpVersion = currentOpVersion;
    long lastOpSeqNo = UNASSIGNED_SEQ_NO;
    long lastOpTerm = UNASSIGNED_PRIMARY_TERM;
    PrimaryTermSupplier currentTerm = (PrimaryTermSupplier) engine.engineConfig.getPrimaryTermSupplier();
    BiFunction<Long, Engine.Index, Engine.Index> indexWithVersion = (version, index) -> new Engine.Index(index.uid(), index.parsedDoc(), UNASSIGNED_SEQ_NO, currentTerm.get(), version, index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(), UNASSIGNED_SEQ_NO, 0);
    BiFunction<Long, Engine.Delete, Engine.Delete> delWithVersion = (version, delete) -> new Engine.Delete(delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, currentTerm.get(), version, delete.versionType(), delete.origin(), delete.startTime(), UNASSIGNED_SEQ_NO, 0);
    TriFunction<Long, Long, Engine.Index, Engine.Index> indexWithSeq = (seqNo, term, index) -> new Engine.Index(index.uid(), index.parsedDoc(), UNASSIGNED_SEQ_NO, currentTerm.get(), index.version(), index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(), seqNo, term);
    TriFunction<Long, Long, Engine.Delete, Engine.Delete> delWithSeq = (seqNo, term, delete) -> new Engine.Delete(delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, currentTerm.get(), delete.version(), delete.versionType(), delete.origin(), delete.startTime(), seqNo, term);
    Function<Engine.Index, Engine.Index> indexWithCurrentTerm = index -> new Engine.Index(index.uid(), index.parsedDoc(), UNASSIGNED_SEQ_NO, currentTerm.get(), index.version(), index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(), index.getIfSeqNo(), index.getIfPrimaryTerm());
    Function<Engine.Delete, Engine.Delete> deleteWithCurrentTerm = delete -> new Engine.Delete(delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, currentTerm.get(), delete.version(), delete.versionType(), delete.origin(), delete.startTime(), delete.getIfSeqNo(), delete.getIfPrimaryTerm());
    for (Engine.Operation op : ops) {
        final boolean versionConflict = rarely();
        final boolean versionedOp = versionConflict || randomBoolean();
        final long conflictingVersion = docDeleted || randomBoolean() ? lastOpVersion + (randomBoolean() ? 1 : -1) : Versions.MATCH_DELETED;
        // use 5 to go above 0 for magic numbers
        final long conflictingSeqNo = lastOpSeqNo == UNASSIGNED_SEQ_NO || randomBoolean() ? lastOpSeqNo + 5 : lastOpSeqNo;
        final long conflictingTerm = conflictingSeqNo == lastOpSeqNo || randomBoolean() ? lastOpTerm + 1 : lastOpTerm;
        if (rarely()) {
            currentTerm.set(currentTerm.get() + 1L);
            engine.rollTranslogGeneration();
        }
        final long correctVersion = docDeleted ? Versions.MATCH_DELETED : lastOpVersion;
        logger.info("performing [{}]{}{}", op.operationType().name().charAt(0), versionConflict ? " (conflict " + conflictingVersion + ")" : "", versionedOp ? " (versioned " + correctVersion + ", seqNo " + lastOpSeqNo + ", term " + lastOpTerm + " )" : "");
        if (op instanceof Engine.Index) {
            final Engine.Index index = (Engine.Index) op;
            if (versionConflict) {
                // generate a conflict
                final Engine.IndexResult result;
                if (randomBoolean()) {
                    result = engine.index(indexWithSeq.apply(conflictingSeqNo, conflictingTerm, index));
                } else {
                    result = engine.index(indexWithVersion.apply(conflictingVersion, index));
                }
                assertThat(result.isCreated(), equalTo(false));
                assertThat(result.getVersion(), equalTo(lastOpVersion));
                assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
                assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
                assertThat(result.getFailure().getStackTrace(), emptyArray());
            } else {
                final Engine.IndexResult result;
                if (versionedOp) {
                    // TODO: add support for non-existing docs
                    if (randomBoolean() && lastOpSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO && docDeleted == false) {
                        result = engine.index(indexWithSeq.apply(lastOpSeqNo, lastOpTerm, index));
                    } else {
                        result = engine.index(indexWithVersion.apply(correctVersion, index));
                    }
                } else {
                    result = engine.index(indexWithCurrentTerm.apply(index));
                }
                assertThat(result.isCreated(), equalTo(docDeleted));
                assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
                assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
                assertThat(result.getFailure(), nullValue());
                lastFieldValue = index.docs().get(0).get("value");
                docDeleted = false;
                lastOpVersion = result.getVersion();
                lastOpSeqNo = result.getSeqNo();
                lastOpTerm = result.getTerm();
                opsPerformed++;
            }
        } else {
            final Engine.Delete delete = (Engine.Delete) op;
            if (versionConflict) {
                // generate a conflict
                Engine.DeleteResult result;
                if (randomBoolean()) {
                    result = engine.delete(delWithSeq.apply(conflictingSeqNo, conflictingTerm, delete));
                } else {
                    result = engine.delete(delWithVersion.apply(conflictingVersion, delete));
                }
                assertThat(result.isFound(), equalTo(docDeleted == false));
                assertThat(result.getVersion(), equalTo(lastOpVersion));
                assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
                assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
                assertThat(result.getFailure().getStackTrace(), emptyArray());
            } else {
                final Engine.DeleteResult result;
                long correctSeqNo = docDeleted ? UNASSIGNED_SEQ_NO : lastOpSeqNo;
                if (versionedOp && lastOpSeqNo != UNASSIGNED_SEQ_NO && randomBoolean()) {
                    result = engine.delete(delWithSeq.apply(correctSeqNo, lastOpTerm, delete));
                } else if (versionedOp) {
                    result = engine.delete(delWithVersion.apply(correctVersion, delete));
                } else {
                    result = engine.delete(deleteWithCurrentTerm.apply(delete));
                }
                assertThat(result.isFound(), equalTo(docDeleted == false));
                assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
                assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
                assertThat(result.getFailure(), nullValue());
                docDeleted = true;
                lastOpVersion = result.getVersion();
                lastOpSeqNo = result.getSeqNo();
                lastOpTerm = result.getTerm();
                opsPerformed++;
            }
        }
        if (randomBoolean()) {
            // refresh and take the chance to check everything is ok so far
            assertVisibleCount(engine, docDeleted ? 0 : 1);
            // even if the doc is not deleted, lastFieldValue can still be null if this is the first op and it failed.
            if (docDeleted == false && lastFieldValue != null) {
                try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
                    final TotalHitCountCollector collector = new TotalHitCountCollector();
                    searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
                    assertThat(collector.getTotalHits(), equalTo(1));
                }
            }
        }
        if (randomBoolean()) {
            engine.flush();
            engine.refresh("test");
        }
        if (rarely()) {
            // simulate GC deletes
            engine.refresh("gc_simulation", Engine.SearcherScope.INTERNAL, true);
            engine.clearDeletedTombstones();
            if (docDeleted) {
                lastOpVersion = Versions.NOT_FOUND;
                lastOpSeqNo = UNASSIGNED_SEQ_NO;
                lastOpTerm = UNASSIGNED_PRIMARY_TERM;
            }
        }
    }
    assertVisibleCount(engine, docDeleted ? 0 : 1);
    if (docDeleted == false) {
        try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
            assertThat(collector.getTotalHits(), equalTo(1));
        }
    }
    return opsPerformed;
}
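The two TriFunctions above (indexWithSeq, delWithSeq) both follow the same stamping idiom: copy an operation while overriding its (seqNo, term) pair, so the test can replay it with either a matching or a deliberately conflicting stamp. A minimal sketch of that idiom follows, using a hypothetical Op class in place of Engine.Index/Engine.Delete:

public class StampSketch {

    // Local stand-in mirroring org.opensearch.common.TriFunction.
    @FunctionalInterface
    interface TriFunction<S, T, U, R> {
        R apply(S s, T t, U u);
    }

    // Illustrative stand-in for an engine operation carrying a (seqNo, term) stamp.
    static final class Op {
        final String id;
        final long seqNo;
        final long term;

        Op(String id, long seqNo, long term) {
            this.id = id;
            this.seqNo = seqNo;
            this.term = term;
        }
    }

    public static void main(String[] args) {
        // copy the op, overriding only its stamp
        TriFunction<Long, Long, Op, Op> withSeq = (seqNo, term, op) -> new Op(op.id, seqNo, term);

        Op original = new Op("doc-1", 7L, 1L);
        Op conflicting = withSeq.apply(original.seqNo + 5, original.term + 1, original); // mismatched stamp
        Op correct = withSeq.apply(original.seqNo, original.term, original);             // matching stamp

        System.out.println(conflicting.seqNo + "/" + conflicting.term); // 12/2 -> would trigger a version conflict
        System.out.println(correct.seqNo + "/" + correct.term);         // 7/1  -> would succeed
    }
}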