Use of org.opensearch.env.Environment in project OpenSearch by opensearch-project.
From the class HunspellServiceTests, method testDicWithTwoAffs.
public void testDicWithTwoAffs() throws Exception {
    Settings settings = Settings.builder()
        .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
        .build();
    IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
        final Environment environment = new Environment(settings, getDataPath("/indices/analyze/two_aff_conf_dir"));
        new HunspellService(settings, environment, emptyMap()).getDictionary("en_US");
    });
    assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage());
    assertThat(e.getCause(), hasToString(containsString("Too many affix files")));
}
Use of org.opensearch.env.Environment in project OpenSearch by opensearch-project.
From the class HunspellServiceTests, method testDicWithNoAff.
public void testDicWithNoAff() throws Exception {
    Settings settings = Settings.builder()
        .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
        .build();
    IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
        final Environment environment = new Environment(settings, getDataPath("/indices/analyze/no_aff_conf_dir"));
        new HunspellService(settings, environment, emptyMap()).getDictionary("en_US");
    });
    assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage());
    assertThat(e.getCause(), hasToString(containsString("Missing affix file")));
}
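Both tests above exercise failure paths of HunspellService by pointing the Environment at config directories with malformed hunspell layouts. For contrast, a minimal sketch of the happy path follows; the resource path and the on-disk layout (a config directory containing hunspell/en_US/en_US.dic plus en_US.aff) are assumptions for illustration, not part of the tests above.

// Minimal sketch of a successful dictionary load (illustration only).
// "/indices/analyze/conf_dir" is a hypothetical resource path holding a well-formed dictionary.
Settings settings = Settings.builder()
    .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
    .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
    .build();
Environment environment = new Environment(settings, getDataPath("/indices/analyze/conf_dir"));
// With a well-formed dictionary directory, getDictionary returns a Lucene hunspell Dictionary.
Dictionary dictionary = new HunspellService(settings, environment, emptyMap()).getDictionary("en_US");
assertNotNull(dictionary);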
Use of org.opensearch.env.Environment in project OpenSearch by opensearch-project.
From the class AnalysisModule, method setupTokenFilters.
private NamedRegistry<AnalysisProvider<TokenFilterFactory>> setupTokenFilters(List<AnalysisPlugin> plugins, HunspellService hunspellService) {
    NamedRegistry<AnalysisProvider<TokenFilterFactory>> tokenFilters = new NamedRegistry<>("token_filter");
    tokenFilters.register("stop", StopTokenFilterFactory::new);
    // Add "standard" for old indices (bwc)
    tokenFilters.register("standard", new AnalysisProvider<TokenFilterFactory>() {
        @Override
        public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
            if (indexSettings.getIndexVersionCreated().before(LegacyESVersion.V_7_0_0)) {
                deprecationLogger.deprecate(
                    "standard_deprecation",
                    "The [standard] token filter name is deprecated and will be removed in a future version."
                );
            } else {
                throw new IllegalArgumentException("The [standard] token filter has been removed.");
            }
            return new AbstractTokenFilterFactory(indexSettings, name, settings) {
                @Override
                public TokenStream create(TokenStream tokenStream) {
                    return tokenStream;
                }
            };
        }

        @Override
        public boolean requiresAnalysisSettings() {
            return false;
        }
    });
    tokenFilters.register("shingle", ShingleTokenFilterFactory::new);
    tokenFilters.register(
        "hunspell",
        requiresAnalysisSettings(
            (indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService)
        )
    );
    tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters);
    return tokenFilters;
}
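The final registration step, tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters), pulls in filters contributed by plugins. Below is a minimal sketch of such a plugin; the class name MyAnalysisPlugin and the filter name "my_filter" are hypothetical, but the provider lambda matches the (IndexSettings, Environment, name, Settings) signature used above.

// Minimal sketch of a plugin-contributed token filter (hypothetical plugin and filter name).
// extractAndRegister(...) above collects the map returned by getTokenFilters().
public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        return Collections.singletonMap("my_filter",
            (indexSettings, environment, name, settings) ->
                new AbstractTokenFilterFactory(indexSettings, name, settings) {
                    @Override
                    public TokenStream create(TokenStream tokenStream) {
                        return tokenStream; // pass-through filter, for illustration only
                    }
                });
    }
}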
Use of org.opensearch.env.Environment in project OpenSearch by opensearch-project.
From the class MetadataCreateIndexServiceTests, method testIndexLifecycleNameSetting.
public void testIndexLifecycleNameSetting() {
    // see: https://github.com/opensearch-project/OpenSearch/issues/1019
    final Settings ilnSetting = Settings.builder().put("index.lifecycle.name", "dummy").build();
    withTemporaryClusterService(((clusterService, threadPool) -> {
        MetadataCreateIndexService checkerService = new MetadataCreateIndexService(
            Settings.EMPTY, clusterService, null, null, null,
            createTestShardLimitService(randomIntBetween(1, 1000), clusterService),
            new Environment(Settings.builder().put("path.home", "dummy").build(), null),
            new IndexScopedSettings(ilnSetting, Collections.emptySet()),
            threadPool, null, new SystemIndices(Collections.emptyMap()), true
        );
        final List<String> validationErrors = checkerService.getIndexSettingsValidationErrors(ilnSetting, true);
        assertThat(validationErrors.size(), is(1));
        assertThat(validationErrors.get(0), is("expected [index.lifecycle.name] to be private but it was not"));
    }));
}
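For reference, the Environment handed to MetadataCreateIndexService above is built with the two-argument constructor and a null config path; the literal "path.home" key is the same setting exposed as Environment.PATH_HOME_SETTING. A minimal standalone sketch, using only the calls already shown in these examples:

// Minimal sketch (illustration only): constructing the same kind of Environment outside the test.
// "path.home" is the key behind Environment.PATH_HOME_SETTING; a null config path is accepted here.
Settings homeOnly = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), "dummy").build();
Environment environment = new Environment(homeOnly, null);
Settings nodeSettings = environment.settings(); // the node settings the Environment was created from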
Use of org.opensearch.env.Environment in project OpenSearch by opensearch-project.
From the class RemoveCorruptedShardDataCommand, method findAndProcessShardPath.
protected void findAndProcessShardPath(OptionSet options, Environment environment, Path[] dataPaths, int nodeLockId, ClusterState clusterState, CheckedConsumer<ShardPath, IOException> consumer) throws IOException {
    final Settings settings = environment.settings();
    final IndexMetadata indexMetadata;
    final int shardId;
    final int fromNodeId;
    final int toNodeId;
    if (options.has(folderOption)) {
        final Path path = getPath(folderOption.value(options)).getParent();
        final Path shardParent = path.getParent();
        final Path shardParentParent = shardParent.getParent();
        final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME);
        if (Files.exists(indexPath) == false || Files.isDirectory(indexPath) == false) {
            throw new OpenSearchException("index directory [" + indexPath + "], must exist and be a directory");
        }
        final String shardIdFileName = path.getFileName().toString();
        final String nodeIdFileName = shardParentParent.getParent().getFileName().toString();
        final String indexUUIDFolderName = shardParent.getFileName().toString();
        if (Files.isDirectory(path)
            && shardIdFileName.chars().allMatch(Character::isDigit) // SHARD-ID path element check
            && NodeEnvironment.INDICES_FOLDER.equals(shardParentParent.getFileName().toString()) // `indices` check
            && nodeIdFileName.chars().allMatch(Character::isDigit) // NODE-ID check
            && NodeEnvironment.NODES_FOLDER.equals(shardParentParent.getParent().getParent().getFileName().toString()) // `nodes` check
        ) {
            shardId = Integer.parseInt(shardIdFileName);
            fromNodeId = Integer.parseInt(nodeIdFileName);
            toNodeId = fromNodeId + 1;
            indexMetadata = StreamSupport.stream(clusterState.metadata().indices().values().spliterator(), false)
                .map(imd -> imd.value)
                .filter(imd -> imd.getIndexUUID().equals(indexUUIDFolderName))
                .findFirst()
                .orElse(null);
        } else {
            throw new OpenSearchException(
                "Unable to resolve shard id. Wrong folder structure at [ " + path.toString() + " ], expected .../nodes/[NODE-ID]/indices/[INDEX-UUID]/[SHARD-ID]"
            );
        }
    } else {
        // otherwise resolve shardPath based on the index name and shard id
        String indexName = Objects.requireNonNull(indexNameOption.value(options), "Index name is required");
        shardId = Objects.requireNonNull(shardIdOption.value(options), "Shard ID is required");
        indexMetadata = clusterState.metadata().index(indexName);
    }
    if (indexMetadata == null) {
        throw new OpenSearchException("Unable to find index in cluster state");
    }
    final IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
    final Index index = indexMetadata.getIndex();
    final ShardId shId = new ShardId(index, shardId);
    for (Path dataPath : dataPaths) {
        final Path shardPathLocation = dataPath.resolve(NodeEnvironment.INDICES_FOLDER)
            .resolve(index.getUUID())
            .resolve(Integer.toString(shId.id()));
        if (Files.exists(shardPathLocation)) {
            final ShardPath shardPath = ShardPath.loadShardPath(
                logger, shId, indexSettings.customDataPath(), new Path[] { shardPathLocation }, nodeLockId, dataPath
            );
            if (shardPath != null) {
                consumer.accept(shardPath);
                return;
            }
        }
    }
    throw new OpenSearchException(
        "Unable to resolve shard path for index [" + indexMetadata.getIndex().getName() + "] and shard id [" + shardId + "]"
    );
}
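A caller drives this method with a CheckedConsumer that receives the resolved ShardPath. The sketch below is a hypothetical invocation (the surrounding variables options, environment, dataPaths, and clusterState are assumed to be in scope), showing only ShardPath accessors that exist on that class.

// Minimal sketch of a caller (illustration only); nodeLockId 0 is an arbitrary value.
findAndProcessShardPath(options, environment, dataPaths, 0, clusterState, shardPath -> {
    // ShardPath.resolveIndex() points at the shard's Lucene index directory.
    Path indexDirectory = shardPath.resolveIndex();
    logger.info("resolved shard {} at [{}]", shardPath.getShardId(), indexDirectory);
});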