Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings in the elastic/elasticsearch project:
class InternalSettingsPreparer, method prepareEnvironment.
/**
 * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings,
 * and then replacing all property placeholders. If a {@link Terminal} is provided and configuration settings are loaded,
 * settings with a value of <code>${prompt.text}</code> or <code>${prompt.secret}</code> will result in a prompt for
 * the setting to the user.
 * @param input the custom settings to use; these are not overwritten by settings in the configuration file
 * @param terminal the Terminal to use for input/output when prompting for setting values
 * @param properties map of properties key/value pairs (usually from the command-line)
 * @return the prepared {@link Environment} built from the fully-resolved {@link Settings}
 * @throws SettingsException if the config file cannot be read, or if config files with
 *         more than one allowed suffix exist for the same base name
 */
public static Environment prepareEnvironment(Settings input, Terminal terminal, Map<String, String> properties) {
// just create enough settings to build the environment, to get the config dir
Settings.Builder output = Settings.builder();
initializeSettings(output, input, properties);
Environment environment = new Environment(output.build());
// start with a fresh output
output = Settings.builder();
boolean settingsFileFound = false;
Set<String> foundSuffixes = new HashSet<>();
// probe every allowed suffix so we can detect ambiguous configs (e.g. both .yml and .json);
// only the first match is actually loaded
for (String allowedSuffix : ALLOWED_SUFFIXES) {
Path path = environment.configFile().resolve("elasticsearch" + allowedSuffix);
if (Files.exists(path)) {
if (!settingsFileFound) {
try {
output.loadFromPath(path);
} catch (IOException e) {
throw new SettingsException("Failed to load settings from " + path.toString(), e);
}
}
settingsFileFound = true;
foundSuffixes.add(allowedSuffix);
}
}
if (foundSuffixes.size() > 1) {
throw new SettingsException("multiple settings files found with suffixes: " + Strings.collectionToDelimitedString(foundSuffixes, ","));
}
// re-initialize settings now that the config file has been loaded
// (input and command-line properties take precedence over the file contents)
initializeSettings(output, input, properties);
finalizeSettings(output, terminal);
// build an intermediate Environment solely to resolve the absolute logs path below
environment = new Environment(output.build());
// we put back the path.logs so we can use it in the logging configuration file
output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString()));
return new Environment(output.build());
}
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings in the elastic/elasticsearch project:
class PluginsService, method updatedSettings.
public Settings updatedSettings() {
    // Records which plugin contributed each additional setting key, so that two
    // plugins claiming the same key can be rejected with a precise error.
    final Map<String, String> settingToPlugin = new HashMap<>();
    final Settings.Builder merged = Settings.builder();
    for (Tuple<PluginInfo, Plugin> entry : plugins) {
        final String pluginName = entry.v1().getName();
        final Settings additional = entry.v2().additionalSettings();
        for (String key : additional.getAsMap().keySet()) {
            final String previousOwner = settingToPlugin.put(key, pluginName);
            if (previousOwner != null) {
                throw new IllegalArgumentException("Cannot have additional setting [" + key + "] " + "in plugin [" + pluginName + "], already added in plugin [" + previousOwner + "]");
            }
        }
        merged.put(additional);
    }
    // The node's own settings are applied last, so they override any
    // plugin-provided values for the same keys.
    return merged.put(this.settings).build();
}
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings in the elastic/elasticsearch project:
class TransportTasksActionTests, method testFailedTasksCount.
public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException {
    Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build();
    setupTestNodes(settings);
    connectNodes(testNodes);
    TestNodesAction[] actions = new TestNodesAction[nodesCount];
    RecordingTaskManagerListener[] listeners = setupListeners(testNodes, "testAction*");
    // Install an action on every node whose node-level operation always fails.
    for (int nodeIndex = 0; nodeIndex < testNodes.length; nodeIndex++) {
        final int currentNode = nodeIndex;
        actions[nodeIndex] = new TestNodesAction(CLUSTER_SETTINGS, "testAction", threadPool, testNodes[nodeIndex].clusterService, testNodes[nodeIndex].transportService) {
            @Override
            protected NodeResponse nodeOperation(NodeRequest request) {
                logger.info("Action on node {}", currentNode);
                throw new RuntimeException("Test exception");
            }
        };
    }
    // No tasks should be registered before the request is sent.
    for (TestNode testNode : testNodes) {
        assertEquals(0, testNode.transportService.getTaskManager().getTasks().size());
    }
    NodesRequest request = new NodesRequest("Test Request");
    NodesResponse responses = actions[0].execute(request).get();
    // Every node-level operation threw, so each node contributes one failure.
    assertEquals(nodesCount, responses.failureCount());
    // Verify task registration/unregistration was observed on every node:
    // the coordinating node sees two task lifecycles (parent + its own node task),
    // every other node sees exactly one.
    assertEquals(4, listeners[0].getEvents().size());
    assertEquals(2, listeners[0].getRegistrationEvents().size());
    assertEquals(2, listeners[0].getUnregistrationEvents().size());
    for (int listenerIndex = 1; listenerIndex < listeners.length; listenerIndex++) {
        assertEquals(2, listeners[listenerIndex].getEvents().size());
        assertEquals(1, listeners[listenerIndex].getRegistrationEvents().size());
        assertEquals(1, listeners[listenerIndex].getUnregistrationEvents().size());
    }
}
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings in the elastic/elasticsearch project:
class GetTermVectorsIT, method testArtificialDocWithPreference.
public void testArtificialDocWithPreference() throws ExecutionException, InterruptedException, IOException {
    // Create the index with term vectors (positions + offsets) enabled on field1.
    Settings.Builder settingsBuilder = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard");
    assertAcked(prepareCreate("test").setSettings(settingsBuilder).addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets"));
    ensureGreen();
    // Index one real document so exactly one shard holds term statistics.
    indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "random permutation"));
    // Collect the ids of all shards backing the index.
    ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards("test").get();
    List<Integer> shardIds = Arrays.stream(searchShardsResponse.getGroups()).map(s -> s.getShardId().id()).collect(Collectors.toList());
    // Request term vectors for an artificial document, targeting each shard in
    // turn via the _shards preference, and accumulate the reported statistics.
    int totalTermFreqSum = 0;
    int docFreqSum = 0;
    for (Integer shardId : shardIds) {
        TermVectorsResponse termVectors = client().prepareTermVectors().setIndex("test").setType("type1").setPreference("_shards:" + shardId).setDoc(jsonBuilder().startObject().field("field1", "random permutation").endObject()).setFieldStatistics(true).setTermStatistics(true).get();
        Fields fields = termVectors.getFields();
        Terms terms = fields.terms("field1");
        assertNotNull(terms);
        TermsEnum termsEnum = terms.iterator();
        while (termsEnum.next() != null) {
            totalTermFreqSum += termsEnum.totalTermFreq();
            docFreqSum += termsEnum.docFreq();
        }
    }
    // Only the shard holding the indexed document has statistics: two terms
    // ("random", "permutation"), each with frequency 1, summing to 2.
    assertEquals("expected to find term statistics in exactly one shard!", 2, totalTermFreqSum);
    assertEquals("expected to find term statistics in exactly one shard!", 2, docFreqSum);
}
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.settings.Settings in the elastic/elasticsearch project:
class GetTermVectorsIT, method testPerFieldAnalyzer.
public void testPerFieldAnalyzer() throws IOException {
    int numFields = 25;
    // Build the mapping and document source together: each field randomly gets
    // term vectors enabled or not. (The order of randomBoolean() calls must be
    // preserved for seed reproducibility.)
    Set<String> fieldsWithTermVectors = new HashSet<>();
    XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties");
    XContentBuilder source = jsonBuilder().startObject();
    for (int i = 0; i < numFields; i++) {
        String fieldName = "field" + i;
        if (randomBoolean()) {
            fieldsWithTermVectors.add(fieldName);
        }
        mapping.startObject(fieldName).field("type", "text").field("term_vector", fieldsWithTermVectors.contains(fieldName) ? "yes" : "no").endObject();
        source.field(fieldName, "some text here");
    }
    source.endObject();
    mapping.endObject().endObject().endObject();
    // Create the index (with an alias) using the prepared mapping.
    Settings.Builder settingsBuilder = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard");
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(settingsBuilder).addMapping("type1", mapping));
    ensureGreen();
    // Index a single document built from the prepared source.
    client().prepareIndex("test", "type1", "0").setSource(source).get();
    refresh();
    // Randomly choose per-field analyzer overrides and selected fields; both
    // sets deliberately include names that do not exist in the mapping.
    Map<String, String> analyzerOverrides = new HashMap<>();
    Set<String> requestedFields = new HashSet<>();
    for (int i = 0; i < numFields; i++) {
        if (randomBoolean()) {
            analyzerOverrides.put("field" + i, "keyword");
        }
        if (randomBoolean()) {
            analyzerOverrides.put("non_existing" + i, "keyword");
        }
        if (randomBoolean()) {
            requestedFields.add("field" + i);
        }
        if (randomBoolean()) {
            requestedFields.add("non_existing" + i);
        }
    }
    // Case 1: no selected fields — expect every field that stores term vectors,
    // some analyzed with the overridden analyzer.
    TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setPerFieldAnalyzer(analyzerOverrides).get();
    checkAnalyzedFields(response.getFields(), fieldsWithTermVectors, analyzerOverrides);
    // Case 2: explicit selected fields (including unmapped names) — expect only
    // the valid requested fields, again honoring the analyzer overrides.
    response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields(requestedFields.toArray(Strings.EMPTY_ARRAY)).setPerFieldAnalyzer(analyzerOverrides).get();
    checkAnalyzedFields(response.getFields(), requestedFields, analyzerOverrides);
}
Aggregations