use of org.opensearch.index.IndexSettings in project OpenSearch by opensearch-project.
the class IndicesClientIT method testSplit.
@SuppressWarnings("unchecked")
public void testSplit() throws IOException {
    // Source index: 2 primaries; number_of_routing_shards = 4 allows splitting up to 4 shards.
    createIndex("source", Settings.builder()
        .put("index.number_of_shards", 2)
        .put("index.number_of_replicas", 0)
        .put("index.number_of_routing_shards", 4)
        .build());
    // A split requires the source index to be write-blocked first.
    updateIndexSettings("source", Settings.builder().put("index.blocks.write", true));
    ResizeRequest resizeRequest = new ResizeRequest("target", "source");
    resizeRequest.setResizeType(ResizeType.SPLIT);
    Settings targetSettings = Settings.builder().put("index.number_of_shards", 4).put("index.number_of_replicas", 0).build();
    resizeRequest.setTargetIndex(new org.opensearch.action.admin.indices.create.CreateIndexRequest("target")
        .settings(targetSettings)
        .alias(new Alias("alias")));
    ResizeResponse resizeResponse = execute(resizeRequest, highLevelClient().indices()::split, highLevelClient().indices()::splitAsync);
    assertTrue(resizeResponse.isAcknowledged());
    assertTrue(resizeResponse.isShardsAcknowledged());
    // Verify the target index received the requested settings and the alias.
    Map<String, Object> getIndexResponse = getAsMap("target");
    Map<String, Object> indexSettings = (Map<String, Object>) XContentMapValues.extractValue("target.settings.index", getIndexResponse);
    assertNotNull(indexSettings);
    assertEquals("4", indexSettings.get("number_of_shards"));
    assertEquals("0", indexSettings.get("number_of_replicas"));
    Map<String, Object> aliasData = (Map<String, Object>) XContentMapValues.extractValue("target.aliases.alias", getIndexResponse);
    assertNotNull(aliasData);
}
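Before the resize, the source index has to be write-blocked, which the test does through its updateIndexSettings helper. A minimal sketch of applying the same block directly with the high-level client's settings API, assuming an existing RestHighLevelClient named client (not part of the test above):
// Hedged sketch: write-block the source index prior to a split, assuming a RestHighLevelClient named `client`.
UpdateSettingsRequest blockWrites = new UpdateSettingsRequest("source")
    .settings(Settings.builder().put("index.blocks.write", true));
// putSettings is the high-level client call for dynamic index settings updates.
client.indices().putSettings(blockWrites, RequestOptions.DEFAULT);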
use of org.opensearch.index.IndexSettings in project OpenSearch by opensearch-project.
the class IndicesClientIT method testClone.
@SuppressWarnings("unchecked")
public void testClone() throws IOException {
    // Source index: 2 primaries; a clone keeps the same primary shard count.
    createIndex("source", Settings.builder()
        .put("index.number_of_shards", 2)
        .put("index.number_of_replicas", 0)
        .put("index.number_of_routing_shards", 4)
        .build());
    // A clone also requires the source index to be write-blocked first.
    updateIndexSettings("source", Settings.builder().put("index.blocks.write", true));
    ResizeRequest resizeRequest = new ResizeRequest("target", "source");
    resizeRequest.setResizeType(ResizeType.CLONE);
    Settings targetSettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build();
    resizeRequest.setTargetIndex(new org.opensearch.action.admin.indices.create.CreateIndexRequest("target")
        .settings(targetSettings)
        .alias(new Alias("alias")));
    ResizeResponse resizeResponse = execute(resizeRequest, highLevelClient().indices()::clone, highLevelClient().indices()::cloneAsync);
    assertTrue(resizeResponse.isAcknowledged());
    assertTrue(resizeResponse.isShardsAcknowledged());
    // Verify the target index received the requested settings and the alias.
    Map<String, Object> getIndexResponse = getAsMap("target");
    Map<String, Object> indexSettings = (Map<String, Object>) XContentMapValues.extractValue("target.settings.index", getIndexResponse);
    assertNotNull(indexSettings);
    assertEquals("2", indexSettings.get("number_of_shards"));
    assertEquals("0", indexSettings.get("number_of_replicas"));
    Map<String, Object> aliasData = (Map<String, Object>) XContentMapValues.extractValue("target.aliases.alias", getIndexResponse);
    assertNotNull(aliasData);
}
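A clone keeps the source's primary shard count, which is why the target settings above request 2 shards again. As a minimal sketch, the asynchronous variant that the test reaches through its execute helper, assuming a RestHighLevelClient named client; the listener body is illustrative only:
// Hedged sketch: async clone with an explicit ActionListener callback.
client.indices().cloneAsync(resizeRequest, RequestOptions.DEFAULT, new ActionListener<ResizeResponse>() {
    @Override
    public void onResponse(ResizeResponse response) {
        // Acknowledged means the cluster accepted the new index's metadata.
        assertTrue(response.isAcknowledged());
    }

    @Override
    public void onFailure(Exception e) {
        throw new AssertionError(e);
    }
});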
use of org.opensearch.index.IndexSettings in project OpenSearch by opensearch-project.
the class CompoundAnalysisTests method analyze.
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
    AnalysisModule analysisModule = createAnalysisModule(settings);
    IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings);
    Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
    TokenStream stream = analyzer.tokenStream("", text);
    stream.reset();
    CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
    List<String> terms = new ArrayList<>();
    while (stream.incrementToken()) {
        String tokText = termAtt.toString();
        terms.add(tokText);
    }
    return terms;
}
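A minimal usage sketch of the helper above, under assumptions: the analyzer name, tokenizer choice, and sample text are illustrative, not values taken from CompoundAnalysisTests.
// Hedged usage sketch: a hypothetical custom analyzer named "my_analyzer" backed by the
// whitespace tokenizer; path.home is set because building an AnalysisModule needs an
// Environment (compare the EdgeNGramTokenizerTests example below).
Settings settings = Settings.builder()
    .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
    .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
    .build();
List<String> terms = analyze(settings, "my_analyzer", "compound analysis test");
// Expected terms: ["compound", "analysis", "test"]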
use of org.opensearch.index.IndexSettings in project OpenSearch by opensearch-project.
the class EdgeNGramTokenizerTests method buildAnalyzers.
private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws IOException {
    Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
    Settings indexSettings = Settings.builder()
        .put(IndexMetadata.SETTING_VERSION_CREATED, version)
        .put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer)
        .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
    return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisPlugin()))
        .getAnalysisRegistry()
        .build(idxSettings);
}
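A brief usage sketch of the builder above; the tokenizer name and the lookup are illustrative assumptions:
// Hedged usage sketch: build analyzers for the current version with the edge_ngram tokenizer
// (registered by CommonAnalysisPlugin) and look up the analyzer the settings key defines.
IndexAnalyzers analyzers = buildAnalyzers(Version.CURRENT, "edge_ngram");
NamedAnalyzer myAnalyzer = analyzers.get("my_analyzer");
assertNotNull(myAnalyzer);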
use of org.opensearch.index.IndexSettings in project OpenSearch by opensearch-project.
the class WhitespaceTokenizerFactoryTests method testMaxTokenLength.
public void testMaxTokenLength() throws IOException {
    final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
    IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(new Index("test", "_na_"), indexSettings);

    // With maxTokenLen = 2, every run of non-whitespace characters is chopped into 2-character pieces.
    final Settings settings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 2).build();
    WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", settings).create();
    try (Reader reader = new StringReader("one, two, three")) {
        tokenizer.setReader(reader);
        assertTokenStreamContents(tokenizer, new String[] { "on", "e,", "tw", "o,", "th", "re", "e" });
    }

    // The default maximum token length is 255 characters, so a 256-character token is split.
    final Settings defaultSettings = Settings.EMPTY;
    tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", defaultSettings).create();
    String veryLongToken = RandomStrings.randomAsciiAlphanumOfLength(random(), 256);
    try (Reader reader = new StringReader(veryLongToken)) {
        tokenizer.setReader(reader);
        assertTokenStreamContents(tokenizer, new String[] { veryLongToken.substring(0, 255), veryLongToken.substring(255) });
    }

    // Values outside the (0, 1048576) range are rejected.
    final Settings tooLongSettings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 1024 * 1024 + 1).build();
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", tooLongSettings).create()
    );
    assertEquals("maxTokenLen must be greater than 0 and less than 1048576 passed: 1048577", e.getMessage());

    final Settings negativeSettings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, -1).build();
    e = expectThrows(
        IllegalArgumentException.class,
        () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", negativeSettings).create()
    );
    assertEquals("maxTokenLen must be greater than 0 and less than 1048576 passed: -1", e.getMessage());
}
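As a companion sketch, the same factory pattern with a mid-range cap; the index settings mirror the test's setup, and the expected tokens follow from the 4-character limit:
// Hedged sketch: with maxTokenLen = 4, only "three" exceeds the cap and is emitted as "thre" + "e".
IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(
    new Index("test", "_na_"),
    Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build()
);
Settings fourCharSettings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 4).build();
Tokenizer capped = new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", fourCharSettings).create();
try (Reader reader = new StringReader("one two three")) {
    capped.setReader(reader);
    assertTokenStreamContents(capped, new String[] { "one", "two", "thre", "e" });
}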