use of org.opensearch.cluster.shards.ShardCounts in project OpenSearch by opensearch-project.
the class ShardLimitValidatorTests method testValidateShardLimit.
public void testValidateShardLimit() {
    int nodesInCluster = randomIntBetween(2, 90);
    ShardCounts counts = forDataNodeCount(nodesInCluster);
    ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(),
        counts.getFirstIndexReplicas(), counts.getFailingIndexShards(), counts.getFailingIndexReplicas());
    Index[] indices = Arrays.stream(state.metadata().indices().values().toArray(IndexMetadata.class))
        .map(IndexMetadata::getIndex)
        .collect(Collectors.toList())
        .toArray(new Index[2]);
    int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas());
    int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas());
    int maxShards = counts.getShardsPerNode() * nodesInCluster;
    ShardLimitValidator shardLimitValidator = createTestShardLimitService(counts.getShardsPerNode());
    ValidationException exception = expectThrows(ValidationException.class,
        () -> shardLimitValidator.validateShardLimit(state, indices));
    assertEquals("Validation Failed: 1: this action would add [" + totalShards
        + "] total shards, but this cluster currently has [" + currentShards + "]/[" + maxShards
        + "] maximum shards open;", exception.getMessage());
}
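These tests depend on a createTestShardLimitService helper that is not shown on this page. A minimal sketch of what such a helper could look like, assuming it only needs to wire the cluster.max_shards_per_node setting into a ShardLimitValidator backed by a mocked ClusterService; the constructor arguments, Mockito usage, and empty SystemIndices map are assumptions, not taken from this page:

// Hypothetical helper sketch: builds a ShardLimitValidator whose only relevant setting is the per-node shard cap.
// Assumes static imports of Mockito's mock/when and Collections.emptyMap; exact constructor shape is an assumption.
public static ShardLimitValidator createTestShardLimitService(int maxShardsPerNode) {
    Settings limitOnlySettings = Settings.builder()
        .put(ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), maxShardsPerNode)
        .build();
    // A mocked ClusterService is enough here: the tests never update the setting dynamically.
    ClusterService clusterService = mock(ClusterService.class);
    when(clusterService.getClusterSettings())
        .thenReturn(new ClusterSettings(limitOnlySettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
    return new ShardLimitValidator(limitOnlySettings, clusterService, new SystemIndices(emptyMap()));
}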
use of org.opensearch.cluster.shards.ShardCounts in project OpenSearch by opensearch-project.
the class ShardLimitValidatorTests method testOverShardLimit.
public void testOverShardLimit() {
    int nodesInCluster = randomIntBetween(1, 90);
    ShardCounts counts = forDataNodeCount(nodesInCluster);
    Settings clusterSettings = Settings.builder().build();
    ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(),
        counts.getFirstIndexReplicas());
    int shardsToAdd = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas());
    Optional<String> errorMessage = ShardLimitValidator.checkShardLimit(shardsToAdd, state, counts.getShardsPerNode());
    int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas());
    int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas());
    int maxShards = counts.getShardsPerNode() * nodesInCluster;
    assertTrue(errorMessage.isPresent());
    assertEquals("this action would add [" + totalShards + "] total shards, but this cluster currently has ["
        + currentShards + "]/[" + maxShards + "] maximum shards open", errorMessage.get());
}
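The assertion above makes the arithmetic behind the check explicit: the shards already open plus the shards the action would add are compared against shardsPerNode times the number of data nodes. A standalone sketch of that comparison, with illustrative variable and method names that are not the validator's actual implementation:

// Hypothetical sketch of the per-cluster shard cap check implied by the assertions above.
// checkLimitSketch, openShards, shardsToAdd, shardsPerNode and dataNodeCount are illustrative names, not OpenSearch API.
static Optional<String> checkLimitSketch(int shardsToAdd, int openShards, int shardsPerNode, int dataNodeCount) {
    int maxShardsInCluster = shardsPerNode * dataNodeCount;
    if (openShards + shardsToAdd > maxShardsInCluster) {
        return Optional.of("this action would add [" + shardsToAdd + "] total shards, but this cluster currently has ["
            + openShards + "]/[" + maxShardsInCluster + "] maximum shards open");
    }
    return Optional.empty();
}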
use of org.opensearch.cluster.shards.ShardCounts in project OpenSearch by opensearch-project.
the class ShardLimitValidatorTests method testNonSystemIndexOpeningFails.
/**
* This test validates that non-system index opening
* fails when it exceeds the cluster max shard limit
*/
public void testNonSystemIndexOpeningFails() {
    int nodesInCluster = randomIntBetween(2, 90);
    ShardCounts counts = forDataNodeCount(nodesInCluster);
    ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(),
        counts.getFirstIndexReplicas(), counts.getFailingIndexShards(), counts.getFailingIndexReplicas());
    Index[] indices = Arrays.stream(state.metadata().indices().values().toArray(IndexMetadata.class))
        .map(IndexMetadata::getIndex)
        .collect(Collectors.toList())
        .toArray(new Index[2]);
    int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas());
    int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas());
    int maxShards = counts.getShardsPerNode() * nodesInCluster;
    ShardLimitValidator shardLimitValidator = createTestShardLimitService(counts.getShardsPerNode());
    ValidationException exception = expectThrows(ValidationException.class,
        () -> shardLimitValidator.validateShardLimit(state, indices));
    assertEquals("Validation Failed: 1: this action would add [" + totalShards
        + "] total shards, but this cluster currently has [" + currentShards + "]/[" + maxShards
        + "] maximum shards open;", exception.getMessage());
}
use of org.opensearch.cluster.shards.ShardCounts in project OpenSearch by opensearch-project.
the class ShardLimitValidatorTests method testUnderShardLimit.
public void testUnderShardLimit() {
    int nodesInCluster = randomIntBetween(2, 90);
    // Calculate the counts for a cluster 1 node smaller than we have, to ensure we have headroom
    ShardCounts counts = forDataNodeCount(nodesInCluster - 1);
    Settings clusterSettings = Settings.builder().build();
    ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(),
        counts.getFirstIndexReplicas());
    int existingShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas());
    int shardsToAdd = randomIntBetween(1, (counts.getShardsPerNode() * nodesInCluster) - existingShards);
    Optional<String> errorMessage = ShardLimitValidator.checkShardLimit(shardsToAdd, state, counts.getShardsPerNode());
    assertFalse(errorMessage.isPresent());
}
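The headroom guarantee can be checked with concrete numbers. The values below are illustrative only and are not taken from the test; they just show why sizing the existing index for one fewer node keeps shardsToAdd within the cap:

// Illustrative numbers only: why computing ShardCounts for (nodesInCluster - 1) guarantees headroom.
public void headroomExample() {
    int shardsPerNode = 10;
    int nodesInCluster = 4;
    int existingShards = 30;                         // at most shardsPerNode * (nodesInCluster - 1)
    int maxShards = shardsPerNode * nodesInCluster;  // 40
    int headroom = maxShards - existingShards;       // 10, so randomIntBetween(1, headroom) never exceeds the cap
    assertTrue(headroom >= shardsPerNode);
}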
use of org.opensearch.cluster.shards.ShardCounts in project OpenSearch by opensearch-project.
the class ShardLimitValidatorTests method testSystemIndexOpeningSucceeds.
/**
* This test validates that system index opening succeeds
* even when it exceeds the cluster max shard limit
*/
public void testSystemIndexOpeningSucceeds() {
    int nodesInCluster = randomIntBetween(2, 90);
    ShardCounts counts = forDataNodeCount(nodesInCluster);
    ClusterState state = createClusterForShardLimitTest(nodesInCluster,
        randomAlphaOfLengthBetween(5, 15), counts.getFirstIndexShards(), counts.getFirstIndexReplicas(),
        // Adding a closed system index to the cluster state
        ".tasks", counts.getFailingIndexShards(), counts.getFailingIndexReplicas());
    Index[] indices = Arrays.stream(state.metadata().indices().values().toArray(IndexMetadata.class))
        .map(IndexMetadata::getIndex)
        .collect(Collectors.toList())
        .toArray(new Index[2]);
    // Shard limit validation succeeds without any issues as a system index is being opened
    ShardLimitValidator shardLimitValidator = createTestShardLimitService(counts.getShardsPerNode());
    shardLimitValidator.validateShardLimit(state, indices);
}
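This last example relies on the validator recognizing .tasks as a system index and leaving it out of the shard count. A hypothetical sketch of that filtering step, assuming the validator sums shards only over non-system indices; the method name, the isSystemIndex lookup, and the helper's shape are illustrative, not the validator's actual code:

// Hypothetical filtering step: count only non-system indices against the limit.
// isSystemIndex(...) stands in for whatever lookup the real validator performs via SystemIndices.
private int shardsToOpen(ClusterState state, Index[] indicesToOpen) {
    return Arrays.stream(indicesToOpen)
        .filter(index -> isSystemIndex(index.getName()) == false)  // system indices such as .tasks are exempt
        .map(index -> state.metadata().index(index))
        .mapToInt(metadata -> metadata.getNumberOfShards() * (1 + metadata.getNumberOfReplicas()))
        .sum();
}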